sessionInfo()
## R version 3.5.1 (2018-07-02)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17134)
## 
## Matrix products: default
## 
## locale:
## [1] LC_COLLATE=English_United States.1252 
## [2] LC_CTYPE=English_United States.1252   
## [3] LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C                          
## [5] LC_TIME=English_United States.1252    
## 
## attached base packages:
## [1] stats     graphics  grDevices utils     datasets  methods   base     
## 
## loaded via a namespace (and not attached):
##  [1] compiler_3.5.1  magrittr_1.5    tools_3.5.1     htmltools_0.3.6
##  [5] yaml_2.2.0      Rcpp_1.0.0      stringi_1.2.4   rmarkdown_1.11 
##  [9] knitr_1.20      stringr_1.3.1   digest_0.6.18   evaluate_0.12

User Inputs

# Unpack the report's `params` list (14 entries, see str(params) output below)
# into top-level variables for convenient use in later chunks.
# Idiom fix: use `<-` for assignment rather than `=`.

# Response / preprocessing settings
output.var    <- params$output.var     # name of the response column, e.g. "y3"
transform.abs <- params$transform.abs  # logical flag (see params listing)
log.pred      <- params$log.pred       # TRUE = model/predict on the log scale
eda           <- params$eda            # TRUE = run exploratory data analysis

# Algorithm toggles (plain variants)
algo.forward  <- params$algo.forward
algo.backward <- params$algo.backward
algo.stepwise <- params$algo.stepwise
algo.LASSO    <- params$algo.LASSO
algo.LARS     <- params$algo.LARS

# Algorithm toggles (".caret" suffix: presumably caret-based training — confirm)
algo.forward.caret  <- params$algo.forward.caret
algo.backward.caret <- params$algo.backward.caret
algo.stepwise.caret <- params$algo.stepwise.caret
algo.LASSO.caret    <- params$algo.LASSO.caret
algo.LARS.caret     <- params$algo.LARS.caret

# Echo the full parameter list to the rendered report so each run's
# configuration is recorded alongside its results.
message("Parameters used for training/prediction: ")
## Parameters used for training/prediction:
str(params)
## List of 14
##  $ output.var         : chr "y3"
##  $ transform.abs      : logi FALSE
##  $ log.pred           : logi FALSE
##  $ eda                : logi FALSE
##  $ algo.forward       : logi FALSE
##  $ algo.backward      : logi FALSE
##  $ algo.stepwise      : logi FALSE
##  $ algo.LASSO         : logi FALSE
##  $ algo.LARS          : logi FALSE
##  $ algo.forward.caret : logi TRUE
##  $ algo.backward.caret: logi TRUE
##  $ algo.stepwise.caret: logi TRUE
##  $ algo.LASSO.caret   : logi TRUE
##  $ algo.LARS.caret    : logi TRUE
# Setup Labels
#
# label.names          = name of the column the models are trained on
# alt.scale.label.name = the same variable on the *other* scale:
#   - if predicting on the log scale, alt.scale is the normal scale
#   - if predicting on the normal scale, alt.scale is the log scale
#
# Fixes vs original: single if/else instead of two independent
# `if (log.pred == TRUE)` / `if (log.pred == FALSE)` tests (no `== TRUE`
# comparison, condition evaluated once), `<-` for assignment, and
# paste0() instead of paste(..., sep = "").
if (log.pred) {
  label.names <- paste0("log.", output.var)
  alt.scale.label.name <- output.var
} else {
  label.names <- output.var
  alt.scale.label.name <- paste0("log.", output.var)
}

Prepare Data

Read and Clean Features

# Load the feature table in two versions: the working copy and a
# higher-precision export of the same data.
features = read.csv("../../Data/features.csv")
features.highprec = read.csv("../../Data/features_highprec.csv")
# Report which columns differ between the two files. The output below lists
# mean relative differences, all below ~0.4% — presumably just reduced
# numeric precision in features.csv (TODO confirm against data source).
all.equal(features, features.highprec)
##  [1] "Component \"x11\": Mean relative difference: 0.001401482"     
##  [2] "Component \"stat9\": Mean relative difference: 0.0002946299"  
##  [3] "Component \"stat12\": Mean relative difference: 0.0005151515" 
##  [4] "Component \"stat13\": Mean relative difference: 0.001354369"  
##  [5] "Component \"stat18\": Mean relative difference: 0.0005141104" 
##  [6] "Component \"stat22\": Mean relative difference: 0.001135977"  
##  [7] "Component \"stat25\": Mean relative difference: 0.0001884615" 
##  [8] "Component \"stat29\": Mean relative difference: 0.001083691"  
##  [9] "Component \"stat36\": Mean relative difference: 0.00021513"   
## [10] "Component \"stat37\": Mean relative difference: 0.0004578125" 
## [11] "Component \"stat43\": Mean relative difference: 0.0003473684" 
## [12] "Component \"stat45\": Mean relative difference: 0.0002951699" 
## [13] "Component \"stat46\": Mean relative difference: 0.0009745763" 
## [14] "Component \"stat47\": Mean relative difference: 8.829902e-05" 
## [15] "Component \"stat55\": Mean relative difference: 0.001438066"  
## [16] "Component \"stat57\": Mean relative difference: 0.0001056911" 
## [17] "Component \"stat58\": Mean relative difference: 0.0004882261" 
## [18] "Component \"stat60\": Mean relative difference: 0.0002408377" 
## [19] "Component \"stat62\": Mean relative difference: 0.0004885106" 
## [20] "Component \"stat66\": Mean relative difference: 1.73913e-06"  
## [21] "Component \"stat67\": Mean relative difference: 0.0006265823" 
## [22] "Component \"stat73\": Mean relative difference: 0.003846154"  
## [23] "Component \"stat75\": Mean relative difference: 0.002334906"  
## [24] "Component \"stat83\": Mean relative difference: 0.0005628415" 
## [25] "Component \"stat86\": Mean relative difference: 0.0006104418" 
## [26] "Component \"stat94\": Mean relative difference: 0.001005115"  
## [27] "Component \"stat97\": Mean relative difference: 0.0003551913" 
## [28] "Component \"stat98\": Mean relative difference: 0.0006157635" 
## [29] "Component \"stat106\": Mean relative difference: 0.0008267717"
## [30] "Component \"stat109\": Mean relative difference: 0.0005121359"
## [31] "Component \"stat110\": Mean relative difference: 0.0007615527"
## [32] "Component \"stat111\": Mean relative difference: 0.001336134" 
## [33] "Component \"stat114\": Mean relative difference: 7.680492e-05"
## [34] "Component \"stat117\": Mean relative difference: 0.0002421784"
## [35] "Component \"stat122\": Mean relative difference: 0.0006521084"
## [36] "Component \"stat123\": Mean relative difference: 8.333333e-05"
## [37] "Component \"stat125\": Mean relative difference: 0.002385135" 
## [38] "Component \"stat130\": Mean relative difference: 0.001874016" 
## [39] "Component \"stat132\": Mean relative difference: 0.0003193182"
## [40] "Component \"stat135\": Mean relative difference: 0.0001622517"
## [41] "Component \"stat136\": Mean relative difference: 7.722008e-05"
## [42] "Component \"stat138\": Mean relative difference: 0.0009739953"
## [43] "Component \"stat143\": Mean relative difference: 0.0004845361"
## [44] "Component \"stat146\": Mean relative difference: 0.0005821596"
## [45] "Component \"stat148\": Mean relative difference: 0.0005366922"
## [46] "Component \"stat153\": Mean relative difference: 0.0001557522"
## [47] "Component \"stat154\": Mean relative difference: 0.001351916" 
## [48] "Component \"stat157\": Mean relative difference: 0.0005427928"
## [49] "Component \"stat162\": Mean relative difference: 0.002622951" 
## [50] "Component \"stat167\": Mean relative difference: 0.0005905172"
## [51] "Component \"stat168\": Mean relative difference: 0.0002791096"
## [52] "Component \"stat169\": Mean relative difference: 0.0004121827"
## [53] "Component \"stat170\": Mean relative difference: 0.0004705882"
## [54] "Component \"stat174\": Mean relative difference: 0.0003822894"
## [55] "Component \"stat179\": Mean relative difference: 0.0008286604"
## [56] "Component \"stat184\": Mean relative difference: 0.0007526718"
## [57] "Component \"stat187\": Mean relative difference: 0.0005122768"
## [58] "Component \"stat193\": Mean relative difference: 4.215116e-05"
## [59] "Component \"stat199\": Mean relative difference: 0.002155844" 
## [60] "Component \"stat203\": Mean relative difference: 0.0003738318"
## [61] "Component \"stat213\": Mean relative difference: 0.000667676" 
## [62] "Component \"stat215\": Mean relative difference: 0.0003997955"
# Preview the first rows of the standard-precision feature set
# (JobName id, predictors x1-x23, and stat1-stat217 columns).
head(features)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10      x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.05e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.03e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.06e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.47e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.01e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.07e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Preview the high-precision feature set for visual comparison — note the
# extra digits in columns such as x11 relative to the standard file.
head(features.highprec)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10          x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.050025e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.034518e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.062312e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.471887e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.010552e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.071662e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Use the high-precision feature set for the rest of the analysis.
features <- features.highprec
#str(features) 

Checking correlations to evaluate removal of redundant features

# Pairwise correlations among the numeric feature columns, rounded to 2 dp.
numeric.flags <- sapply(features, is.numeric)
corr.matrix <- round(cor(features[numeric.flags]), 2)

# Restrict to variables that have at least one strong (|r| >= threshold)
# off-diagonal correlation with another variable.
threshold <- 0.6
corr.matrix.tmp <- corr.matrix
diag(corr.matrix.tmp) <- 0  # zero the diagonal so self-correlation is ignored
high.corr <- apply(abs(corr.matrix.tmp) >= threshold, 1, any)
high.corr.matrix <- corr.matrix.tmp[high.corr, high.corr]

DT::datatable(corr.matrix)
DT::datatable(high.corr.matrix)

Feature Names

# Feature names = every column of `features` except the JobName identifier.
feature.names <- setdiff(colnames(features), 'JobName')
#str(feature.names)

Read and Clean Labels

# Load the labels file and keep only the job identifier plus the
# selected output variable (params$output.var).
labels <- read.csv("../../Data/labels.csv")
#str(labels)
labels <- labels[, c("JobName", output.var)]
summary(labels)
##       JobName           y3        
##  Job_00001:   1   Min.   : 95.91  
##  Job_00002:   1   1st Qu.:118.21  
##  Job_00003:   1   Median :123.99  
##  Job_00004:   1   Mean   :125.36  
##  Job_00005:   1   3rd Qu.:131.06  
##  Job_00006:   1   Max.   :193.73  
##  (Other)  :9994   NA's   :2497

Merge Datasets

# Join features with labels on JobName, then drop the identifier column
# so only model inputs and the label remain.
data <- merge(features, labels, by = 'JobName')
drops <- c('JobName')
data <- data[, !(colnames(data) %in% drops)]
#str(data)

Transformations

# Optional label transformations, controlled by report params.
if (transform.abs == TRUE){
  # Convert labels from dB to absolute scale: abs = 10^(dB/20).
  data[,label.names] = 10^(data[,label.names]/20)
  # Drop extreme outliers on the output variable after the transform.
  # Generalized from the previously hard-coded `y3` so the filter follows
  # params$output.var instead of silently assuming one output column.
  data = filter(data, .data[[output.var]] < 1E7)
}


#str(data)
if (log.pred == TRUE){
  # Predict on the log10 of the alternate-scale label, then drop the
  # original alternate-scale column so it cannot leak into the features.
  data[label.names] = log(data[alt.scale.label.name],10)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}
#str(data)

Remove NA Cases

data = data[complete.cases(data),]

Exploratory Data Analysis

Check correlation of Label with Features

# Correlation of each feature with the label column(s), rounded for display.
if (eda == TRUE){
  feature.cols <- dplyr::select(data, -one_of(label.names))
  label.cols <- dplyr::select_at(data, label.names)
  corr.to.label <- round(cor(feature.cols, label.cols), 4)
  DT::datatable(corr.to.label)
}

Multicollinearity - VIF

# Variance Inflation Factors for the candidate features, largest first,
# to spot multicollinearity.
if (eda == TRUE){
  vif.raw <- usdm::vif(select_at(data, feature.names))
  vifDF <- arrange(vif.raw, desc(VIF))
  head(vifDF, 10)
}

Scatterplots

# Diagonal panel function for pairs(): draws a histogram of x scaled so the
# tallest bar has height 1 (with 1.5 vertical headroom), restoring the
# previous plot coordinates on exit.
panel.hist <- function(x, ...)
{
    saved <- par("usr")
    on.exit(par(usr = saved))
    par(usr = c(saved[1:2], 0, 1.5))
    h <- hist(x, plot = FALSE)
    edges <- h$breaks
    heights <- h$counts / max(h$counts)
    nb <- length(edges)
    rect(edges[-nb], 0, edges[-1], heights, col = "cyan", ...)
}
if (eda == TRUE){
  # Distribution of the label column(s).
  # NOTE(review): `histogram` is presumably lattice::histogram — confirm
  # which package is attached earlier in the document.
  histogram(data[ ,label.names])
  #hist(data[complete.cases(data),alt.scale.label.name])
}
# https://stackoverflow.com/questions/24648729/plot-one-numeric-variable-against-n-numeric-variables-in-n-plots
# Plot the response `yvar` against each predictor, one scatter plot per
# predictor.
#
# Args:
#   data  : data frame holding all columns
#   xvars : character vector of predictor names; NULL means every column
#           except yvar
#   yvar  : response column name
ind.pairs.plot <- function(data, xvars=NULL, yvar)
{
    if (is.null(xvars)) {
        xvars <- names(data)[names(data) != yvar]
    }

    # seq_along() rather than 1:length(xvars): an empty xvars then draws
    # nothing instead of indexing with the bogus sequence 1:0.
    for (i in seq_along(xvars)) {
        plot(data[, xvars[i]], data[, yvar], xlab = xvars[i], ylab = yvar)
    }
}

if (eda == TRUE){
  # One scatter plot of the label against each feature.
  ind.pairs.plot(data, feature.names, label.names)
}

# 
# pl <- ggplot(data, aes(x=x18, y = y3))
# pl2 <- pl + geom_point(aes(alpha = 0.1)) # default color gradient based on 'hp'
# print(pl2)

Feature Engineering

# Feature engineering: replace x18 with sqrt(x18).
# NOTE(review): gated on eda == FALSE, which is inverted relative to the
# eda == TRUE gates above — confirm this is intentional.
if(eda ==FALSE){
  # x18 may need transformations
  plot(data[,'x18'], data[,label.names], main = "Original Scatter Plot vs. x18", ylab = label.names, xlab = 'x18')
  # NOTE(review): title still says "Original" even though x is sqrt(x18).
  plot(sqrt(data[,'x18']), data[,label.names], main = "Original Scatter Plot vs. sqrt(x18)", ylab = label.names, xlab = 'sqrt(x18)')
  
  # transforming x18: add the sqrt column, then drop the raw column
  data$sqrt.x18 = sqrt(data$x18)
  data = dplyr::select(data,-one_of('x18'))
  
  # what about x7, x9?
  # x11 looks like data is at discrete points after a while. Will this be a problem?
}

Modeling

Train Test Split

# Shuffle rows, then make an 80/20 train/test split on the label column.
data <- data[sample(nrow(data)), ] # randomly shuffle data
split <- sample.split(data[, label.names], SplitRatio = 0.8)

# sample.split returns an NA-free logical, so plain logical indexing is
# equivalent to subset(data, split == TRUE/FALSE).
data.train <- data[split, ]
data.test <- data[!split, ]

Common Functions

# Diagnostic plots for a fitted linear model: the default lm plots,
# studentized and standardized residual plots, a residual histogram with a
# standard-normal overlay, leverage, and Cook's distance.
#
# Args:
#   model : fitted lm (or compatible) model
#   train : data the model was fit on (used for predicted values and n)
#
# Returns: the vector of Cook's distances.
plot.diagnostics <-  function(model, train) {
  plot(model)
  
  # (removed an unused `residuals = resid(model)` local — the default
  # plot(model) call above already covers the raw residual plots)
  r.standard = rstandard(model)
  r.student = rstudent(model)

  # Studentized residuals vs predicted values
  plot(predict(model,train),r.student,
      ylab="Student Residuals", xlab="Predicted Values", 
      main="Student Residual Plot") 
  abline(0, 0)
  
  # Standardized residuals vs predicted values, with +/-2 reference lines
  plot(predict(model, train),r.standard,
      ylab="Standard Residuals", xlab="Predicted Values", 
      main="Standard Residual Plot") 
  abline(0, 0)
  abline(2, 0)
  abline(-2, 0)
  
  # Histogram
  hist(r.student, freq=FALSE, main="Distribution of Studentized Residuals", 
  xlab="Studentized Residuals", ylab="Density", ylim=c(0,0.5))

  # Create range of x-values for normal curve
  xfit <- seq(min(r.student)-1, max(r.student)+1, length=40)

  # Generate values from the normal distribution at the specified values
  yfit <- (dnorm(xfit))

  # Add the normal curve
  lines(xfit, yfit, ylim=c(0,0.5))
  
  
  # http://www.stat.columbia.edu/~martin/W2024/R7.pdf
  # Influential plots
  inf.meas = influence.measures(model)
  # print (summary(inf.meas)) # too much data
  
  # Leverage plot
  lev = hat(model.matrix(model))
  plot(lev, ylab = 'Leverage - check')
  
  # Cook's Distance, with the common 4/n and 1 cut-off lines
  cd = cooks.distance(model)
  plot(cd,ylab="Cooks distances")
  abline(4/nrow(train),0)
  abline(1,0)
  
  print (paste("Number of data points that have Cook's D > 4/n: ", length(cd[cd > 4/nrow(train)]), sep = "")) 
  print (paste("Number of data points that have Cook's D > 1: ", length(cd[cd > 1]), sep = "")) 
  return(cd)
}

# Train a subset-selection or regularization regression via caret with
# parallel cross-validation, print/plot diagnostics for the chosen method,
# and return the fitted model plus the diagnostic plot objects.
#
# Args:
#   formula       : model formula (the full formula is fine; the methods
#                   select variable subsets from it)
#   data          : training data frame
#   method        : caret method ('leapForward', 'leapBackward', 'leapSeq',
#                   'glmnet' or 'lars')
#   subopt        : sub-option for 'glmnet' (currently 'LASSO'); may be NULL
#   feature.names : candidate feature names (sizes nvmax for leap methods)
#   train.control : optional trainControl; default is 10-fold CV grid search
#   tune.grid     : optional tuning grid; a per-method default is built if NULL
#   pre.proc      : optional preProcess spec ('lars' forces center/scale)
#
# Returns: list(model, id, residPlot, residHistogram or metricsPlot),
#   depending on the method branch.
train.caret.glmselect = function(formula, data, method
                                 ,subopt = NULL, feature.names
                                 , train.control = NULL, tune.grid = NULL, pre.proc = NULL){
  
  # `subopt` may be NULL, and `NULL == 'LASSO'` is logical(0), so the bare
  # `method == 'glmnet' && subopt == 'LASSO'` tests crashed whenever method
  # was 'glmnet' with subopt left NULL. Normalize the dispatch flags once,
  # using isTRUE() to keep every `if` condition a length-1 logical.
  is.leap = method %in% c('leapForward', 'leapBackward', 'leapSeq')
  is.lasso = (method == 'glmnet') && isTRUE(subopt == 'LASSO')
  
  if(is.null(train.control)){
    # Default resampling: 10-fold cross-validation with grid search,
    # verbose and parallel-enabled.
    train.control <- trainControl(method = "cv"
                              ,number = 10
                              ,search = "grid"
                              ,verboseIter = TRUE
                              ,allowParallel = TRUE
                              )
  }
  
  if(is.null(tune.grid)){
    if (is.leap){
      # Consider every subset size up to the full feature count.
      tune.grid = data.frame(nvmax = 1:length(feature.names))
    }
    if (is.lasso){
      # Will only show 1 Lambda value during training, but that is OK
      # https://stackoverflow.com/questions/47526544/why-need-to-tune-lambda-with-carettrain-method-glmnet-and-cv-glmnet
      # Another option for LASSO is this: https://github.com/topepo/caret/blob/master/RegressionTests/Code/lasso.R
      lambda = 10^seq(-2,0, length =100)
      alpha = c(1) # alpha = 1 selects pure LASSO in glmnet
      tune.grid = expand.grid(alpha = alpha,lambda = lambda)
    }
    if (method == 'lars'){
      # https://github.com/topepo/caret/blob/master/RegressionTests/Code/lars.R
      fraction = seq(0, 1, length = 100)
      tune.grid = expand.grid(fraction = fraction)
      pre.proc = c("center", "scale") 
    }
  }
  
  # http://sshaikh.org/2015/05/06/parallelize-machine-learning-in-r-with-multi-core-cpus/
  # Use ~75% of cores only, leave the rest for other tasks. floor() because
  # makeCluster() expects a whole number of workers; max(1, ...) guards
  # machines where 75% rounds down to zero.
  cl <- makeCluster(max(1, floor(detectCores()*0.75)))
  registerDoParallel(cl)

  set.seed(1) 
  # note that the seed has to actually be set just before train() is called;
  # setting it earlier alone does not ensure reproducibility for some reason
  model.caret <- caret::train(formula
                              , data = data
                              , method = method
                              , tuneGrid = tune.grid
                              , trControl = train.control
                              , preProc = pre.proc
                              )
  
  stopCluster(cl)
  registerDoSEQ() # re-register the sequential engine now that parallel work is done
  
  if (is.leap){
    print(model.caret$results) # all model results
    print(model.caret$bestTune) # best model
  
    model = model.caret$finalModel

    # Metrics Plot 
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-nvmax) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=nvmax,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot
    # leap function does not support studentized residuals
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)
   
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)
    
    # Provides the coefficients of the best model
    id = rownames(model.caret$bestTune)
    message("Coefficients of final model:")
    print (coef(model, id = id))
    
    return(list(model = model,id = id, residPlot = residPlot, residHistogram=residHistogram))
  }
  if (is.lasso){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    print(model.caret$results)
    
    # Metrics Plot 
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-lambda) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=lambda,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot 
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') +
      theme_light()
    plot(residHistogram)

    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, metricsPlot=metricsPlot ))
  }
  if (method == 'lars'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    # Metrics Plot
    dataPlot = model.caret$results %>%
        gather(key='metric',value='value',-fraction) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=fraction,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)

    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, residHistogram=residHistogram))
  }
}

# https://stackoverflow.com/questions/48265743/linear-model-subset-selection-goodness-of-fit-with-k-fold-cross-validation
# Predict from a regsubsets fit for subset `id` on new data. The formula is
# passed explicitly because object$call[[2]] only yields the symbol
# `formula`, not the value it was bound to.
predict.regsubsets <- function(object, newdata, id, formula, ...) {
    # Design matrix adds the intercept and expands any interaction terms;
    # keep only the columns belonging to the selected subset.
    design <- model.matrix(formula, newdata)
    beta <- coef(object, id = id)
    selected <- names(beta)
    design[, selected] %*% beta
}
  
# Evaluate a fitted model on a held-out test set: print a summary of the
# predictions, report the test MSE, and plot predicted vs. actual with
# +/- `good` and +/- `ok` tolerance bands (slopes around the 1:1 line).
#
# If using caret for glm select equivalent functionality, pass the full
# formula (the subset of variables is selected from it).
#
# Args:
#   model         : fitted model object
#   test          : test data frame
#   level         : confidence level for predict() when method is NULL
#   draw.limits, good, ok : tolerance-band options
#   method        : NULL for a plain lm fit, else the caret method name
#   subopt        : sub-option ('LASSO' for glmnet); may be NULL
#   id            : subset id for leap methods (predict.regsubsets)
#   formula       : formula for predict.regsubsets
#   feature.names : feature columns (glmnet needs a plain numeric matrix)
#   label.names   : name of the label column
test.model = function(model, test, level=0.95
                      ,draw.limits = FALSE, good = 0.1, ok = 0.15
                      ,method = NULL, subopt = NULL
                      ,id = NULL, formula, feature.names, label.names){
  # Use an if/else chain: the original fell through to `method == ...`
  # comparisons after the NULL branch, and `NULL == 'x'` is logical(0),
  # which crashed `if` whenever method was NULL. isTRUE() likewise guards
  # the possibly-NULL subopt.
  if (is.null(method)){
    pred = predict(model, newdata=test, interval="confidence", level = level) 
  } else if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    pred = predict.regsubsets(model, newdata = test, id = id, formula = formula)
  } else if (method == 'glmnet' && isTRUE(subopt == 'LASSO')){
    xtest = as.matrix(test[,feature.names]) 
    pred=as.data.frame(predict(model, xtest))
  } else if (method == 'lars'){
    pred=as.data.frame(predict(model, newdata = test))
  }
    
  # Summary of predicted values
  print ("Summary of predicted values: ")
  print(summary(pred[,1]))

  test.mse = mean((test[,label.names]-pred[,1])^2)
  print (paste(method, subopt, "Test MSE:", test.mse, sep=" "))

  # Predicted vs actual with good (green) and ok (blue) tolerance bands
  plot(test[,label.names],pred[,1],xlab = "Actual", ylab = "Predicted")
  abline(0,(1+good),col='green', lwd = 3)
  abline(0,(1-good),col='green', lwd = 3)
  abline(0,(1+ok),col='blue', lwd = 3)
  abline(0,(1-ok),col='blue', lwd = 3)
  
}

Setup Formulae

# Build the modeling formula from the training-set column names:
# LHS = all label columns (joined with " + "), RHS = everything else.
n <- names(data.train)
 formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~", paste(n[!n %in% label.names], collapse = " + "))) 

# Alternative hand-picked interaction formulae, kept for reference:
# ind.interact = c("x4","x7","x8", "x9", "x10", "x11", "x14", "x16", "x17", "x21", "sqrt.x18")
# ind.nointeract = c("stat13", "stat14", "stat24", "stat60", "stat98", "stat110", "stat144", "stat149")
# 
# interact = paste(ind.interact, collapse = " + ")
# nointeract = paste(ind.nointeract, collapse = " + ")
# 
# # ^2 is 2 way interaction, ^3 is 3 way interaction
# formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + "), "~ (", interact, " )^2 ", " + ", nointeract ))
# 
# # # * is all way interaction
# # formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + "), "~ (", interact, " ) ", " + ", nointeract ))

# Intercept-only (grand mean) formula, e.g. as a lower scope for
# stepwise selection.
grand.mean.formula = as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~ 1"))

print(formula)
## y3 ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + 
##     x12 + x13 + x14 + x15 + x16 + x17 + x19 + x20 + x21 + x22 + 
##     x23 + stat1 + stat2 + stat3 + stat4 + stat5 + stat6 + stat7 + 
##     stat8 + stat9 + stat10 + stat11 + stat12 + stat13 + stat14 + 
##     stat15 + stat16 + stat17 + stat18 + stat19 + stat20 + stat21 + 
##     stat22 + stat23 + stat24 + stat25 + stat26 + stat27 + stat28 + 
##     stat29 + stat30 + stat31 + stat32 + stat33 + stat34 + stat35 + 
##     stat36 + stat37 + stat38 + stat39 + stat40 + stat41 + stat42 + 
##     stat43 + stat44 + stat45 + stat46 + stat47 + stat48 + stat49 + 
##     stat50 + stat51 + stat52 + stat53 + stat54 + stat55 + stat56 + 
##     stat57 + stat58 + stat59 + stat60 + stat61 + stat62 + stat63 + 
##     stat64 + stat65 + stat66 + stat67 + stat68 + stat69 + stat70 + 
##     stat71 + stat72 + stat73 + stat74 + stat75 + stat76 + stat77 + 
##     stat78 + stat79 + stat80 + stat81 + stat82 + stat83 + stat84 + 
##     stat85 + stat86 + stat87 + stat88 + stat89 + stat90 + stat91 + 
##     stat92 + stat93 + stat94 + stat95 + stat96 + stat97 + stat98 + 
##     stat99 + stat100 + stat101 + stat102 + stat103 + stat104 + 
##     stat105 + stat106 + stat107 + stat108 + stat109 + stat110 + 
##     stat111 + stat112 + stat113 + stat114 + stat115 + stat116 + 
##     stat117 + stat118 + stat119 + stat120 + stat121 + stat122 + 
##     stat123 + stat124 + stat125 + stat126 + stat127 + stat128 + 
##     stat129 + stat130 + stat131 + stat132 + stat133 + stat134 + 
##     stat135 + stat136 + stat137 + stat138 + stat139 + stat140 + 
##     stat141 + stat142 + stat143 + stat144 + stat145 + stat146 + 
##     stat147 + stat148 + stat149 + stat150 + stat151 + stat152 + 
##     stat153 + stat154 + stat155 + stat156 + stat157 + stat158 + 
##     stat159 + stat160 + stat161 + stat162 + stat163 + stat164 + 
##     stat165 + stat166 + stat167 + stat168 + stat169 + stat170 + 
##     stat171 + stat172 + stat173 + stat174 + stat175 + stat176 + 
##     stat177 + stat178 + stat179 + stat180 + stat181 + stat182 + 
##     stat183 + stat184 + stat185 + stat186 + stat187 + stat188 + 
##     stat189 + stat190 + stat191 + stat192 + stat193 + stat194 + 
##     stat195 + stat196 + stat197 + stat198 + stat199 + stat200 + 
##     stat201 + stat202 + stat203 + stat204 + stat205 + stat206 + 
##     stat207 + stat208 + stat209 + stat210 + stat211 + stat212 + 
##     stat213 + stat214 + stat215 + stat216 + stat217 + sqrt.x18
print(grand.mean.formula)
## y3 ~ 1
# Refresh feature.names from the final column set, since feature
# engineering above may have transformed/renamed some features.
feature.names <- setdiff(n, label.names)

Full Model

# Baseline: ordinary least squares on the full formula (all features).
model.full = lm(formula , data.train)
summary(model.full)
## 
## Call:
## lm(formula = formula, data = data.train)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -22.080  -6.100  -1.750   4.428  54.524 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  9.075e+01  2.769e+00  32.778  < 2e-16 ***
## x1          -1.992e-01  1.900e-01  -1.048   0.2947    
## x2           1.158e-01  1.216e-01   0.952   0.3410    
## x3           2.098e-02  3.333e-02   0.629   0.5290    
## x4          -1.431e-02  2.632e-03  -5.435 5.72e-08 ***
## x5           4.870e-02  8.624e-02   0.565   0.5723    
## x6           4.663e-02  1.742e-01   0.268   0.7889    
## x7           3.339e+00  1.843e-01  18.116  < 2e-16 ***
## x8           1.109e-01  4.306e-02   2.575   0.0101 *  
## x9           9.361e-01  9.630e-02   9.721  < 2e-16 ***
## x10          3.927e-01  8.945e-02   4.390 1.16e-05 ***
## x11          3.639e+07  2.146e+07   1.696   0.0900 .  
## x12         -2.819e-02  5.476e-02  -0.515   0.6067    
## x13          2.245e-02  2.187e-02   1.026   0.3047    
## x14         -1.627e-01  9.436e-02  -1.724   0.0848 .  
## x15          3.431e-02  9.012e-02   0.381   0.7035    
## x16          2.873e-01  6.194e-02   4.638 3.59e-06 ***
## x17          4.325e-01  9.437e-02   4.583 4.68e-06 ***
## x19          3.053e-02  4.828e-02   0.632   0.5272    
## x20         -2.197e-01  3.338e-01  -0.658   0.5104    
## x21          2.897e-02  1.232e-02   2.351   0.0187 *  
## x22         -7.918e-02  1.008e-01  -0.785   0.4323    
## x23         -4.955e-02  9.596e-02  -0.516   0.6056    
## stat1       -5.220e-02  7.245e-02  -0.721   0.4712    
## stat2        4.942e-02  7.200e-02   0.686   0.4925    
## stat3        1.351e-01  7.242e-02   1.866   0.0621 .  
## stat4       -1.579e-01  7.222e-02  -2.187   0.0288 *  
## stat5       -5.071e-02  7.283e-02  -0.696   0.4863    
## stat6       -1.188e-01  7.239e-02  -1.641   0.1009    
## stat7       -3.215e-02  7.221e-02  -0.445   0.6561    
## stat8       -1.742e-02  7.225e-02  -0.241   0.8095    
## stat9       -7.690e-03  7.223e-02  -0.106   0.9152    
## stat10      -1.285e-01  7.240e-02  -1.774   0.0761 .  
## stat11      -3.674e-02  7.315e-02  -0.502   0.6155    
## stat12       4.804e-02  7.161e-02   0.671   0.5023    
## stat13      -9.702e-02  7.217e-02  -1.344   0.1789    
## stat14      -2.259e-01  7.196e-02  -3.140   0.0017 ** 
## stat15      -6.247e-02  7.172e-02  -0.871   0.3838    
## stat16       1.167e-02  7.197e-02   0.162   0.8712    
## stat17       1.223e-02  7.173e-02   0.171   0.8646    
## stat18      -9.609e-02  7.194e-02  -1.336   0.1817    
## stat19       1.122e-01  7.163e-02   1.566   0.1175    
## stat20      -8.680e-02  7.209e-02  -1.204   0.2287    
## stat21      -4.262e-02  7.270e-02  -0.586   0.5577    
## stat22      -1.024e-01  7.227e-02  -1.417   0.1565    
## stat23       1.731e-01  7.214e-02   2.399   0.0165 *  
## stat24      -1.565e-01  7.240e-02  -2.161   0.0307 *  
## stat25      -9.563e-02  7.232e-02  -1.322   0.1861    
## stat26      -5.774e-02  7.176e-02  -0.805   0.4211    
## stat27      -8.833e-03  7.248e-02  -0.122   0.9030    
## stat28       5.402e-02  7.239e-02   0.746   0.4555    
## stat29       3.798e-02  7.253e-02   0.524   0.6005    
## stat30       6.557e-02  7.299e-02   0.898   0.3691    
## stat31      -2.819e-02  7.280e-02  -0.387   0.6986    
## stat32       2.399e-02  7.289e-02   0.329   0.7421    
## stat33      -1.769e-01  7.212e-02  -2.453   0.0142 *  
## stat34       1.821e-02  7.235e-02   0.252   0.8012    
## stat35      -1.109e-01  7.237e-02  -1.532   0.1255    
## stat36       1.326e-02  7.156e-02   0.185   0.8530    
## stat37      -1.366e-01  7.289e-02  -1.875   0.0609 .  
## stat38       1.693e-01  7.253e-02   2.335   0.0196 *  
## stat39      -1.269e-01  7.206e-02  -1.761   0.0783 .  
## stat40       2.005e-02  7.202e-02   0.278   0.7808    
## stat41      -1.131e-01  7.147e-02  -1.583   0.1136    
## stat42      -1.056e-01  7.207e-02  -1.466   0.1428    
## stat43      -9.940e-02  7.230e-02  -1.375   0.1693    
## stat44      -1.030e-02  7.189e-02  -0.143   0.8861    
## stat45      -1.059e-01  7.241e-02  -1.463   0.1435    
## stat46       7.012e-02  7.292e-02   0.962   0.3363    
## stat47       9.436e-02  7.274e-02   1.297   0.1946    
## stat48       6.637e-02  7.221e-02   0.919   0.3581    
## stat49       3.700e-02  7.176e-02   0.516   0.6062    
## stat50       8.195e-02  7.186e-02   1.140   0.2542    
## stat51       1.034e-01  7.198e-02   1.436   0.1510    
## stat52      -7.706e-02  7.222e-02  -1.067   0.2860    
## stat53      -8.366e-03  7.291e-02  -0.115   0.9086    
## stat54      -9.544e-02  7.257e-02  -1.315   0.1885    
## stat55       7.299e-02  7.166e-02   1.018   0.3085    
## stat56      -1.112e-02  7.252e-02  -0.153   0.8781    
## stat57      -5.677e-02  7.184e-02  -0.790   0.4294    
## stat58       5.987e-03  7.155e-02   0.084   0.9333    
## stat59       6.685e-02  7.202e-02   0.928   0.3534    
## stat60       1.186e-01  7.233e-02   1.639   0.1012    
## stat61       1.279e-02  7.204e-02   0.178   0.8591    
## stat62      -3.736e-02  7.186e-02  -0.520   0.6032    
## stat63       8.660e-02  7.234e-02   1.197   0.2313    
## stat64      -9.877e-02  7.155e-02  -1.380   0.1675    
## stat65      -7.995e-02  7.258e-02  -1.102   0.2707    
## stat66       9.041e-02  7.330e-02   1.233   0.2175    
## stat67      -1.127e-02  7.275e-02  -0.155   0.8769    
## stat68      -4.156e-02  7.239e-02  -0.574   0.5660    
## stat69      -2.717e-03  7.187e-02  -0.038   0.9698    
## stat70       6.679e-02  7.192e-02   0.929   0.3531    
## stat71      -1.903e-02  7.192e-02  -0.265   0.7913    
## stat72       6.953e-02  7.275e-02   0.956   0.3393    
## stat73       1.179e-01  7.243e-02   1.628   0.1035    
## stat74      -2.409e-02  7.246e-02  -0.332   0.7395    
## stat75      -4.956e-02  7.280e-02  -0.681   0.4961    
## stat76      -4.243e-03  7.231e-02  -0.059   0.9532    
## stat77      -5.256e-02  7.219e-02  -0.728   0.4666    
## stat78      -6.536e-02  7.248e-02  -0.902   0.3673    
## stat79      -5.659e-03  7.226e-02  -0.078   0.9376    
## stat80       3.688e-02  7.244e-02   0.509   0.6107    
## stat81       7.130e-02  7.245e-02   0.984   0.3251    
## stat82       8.624e-02  7.235e-02   1.192   0.2333    
## stat83      -1.186e-02  7.223e-02  -0.164   0.8696    
## stat84       1.048e-03  7.237e-02   0.014   0.9884    
## stat85      -8.723e-03  7.237e-02  -0.121   0.9041    
## stat86      -1.255e-02  7.255e-02  -0.173   0.8626    
## stat87      -7.880e-02  7.247e-02  -1.087   0.2769    
## stat88      -7.227e-02  7.209e-02  -1.002   0.3161    
## stat89      -9.006e-02  7.192e-02  -1.252   0.2106    
## stat90      -4.511e-02  7.238e-02  -0.623   0.5332    
## stat91      -4.485e-02  7.181e-02  -0.625   0.5323    
## stat92      -1.365e-01  7.208e-02  -1.893   0.0584 .  
## stat93      -1.161e-01  7.308e-02  -1.588   0.1122    
## stat94      -5.654e-02  7.267e-02  -0.778   0.4366    
## stat95       1.831e-02  7.204e-02   0.254   0.7994    
## stat96      -2.878e-02  7.206e-02  -0.399   0.6896    
## stat97      -8.116e-05  7.184e-02  -0.001   0.9991    
## stat98       1.027e+00  7.138e-02  14.394  < 2e-16 ***
## stat99       4.184e-02  7.244e-02   0.578   0.5635    
## stat100      1.863e-01  7.243e-02   2.572   0.0101 *  
## stat101     -7.866e-02  7.302e-02  -1.077   0.2814    
## stat102      6.142e-03  7.254e-02   0.085   0.9325    
## stat103     -6.664e-02  7.303e-02  -0.912   0.3616    
## stat104     -7.611e-02  7.195e-02  -1.058   0.2902    
## stat105      8.686e-02  7.170e-02   1.212   0.2257    
## stat106     -1.052e-01  7.203e-02  -1.461   0.1442    
## stat107     -1.006e-01  7.232e-02  -1.392   0.1641    
## stat108     -5.691e-02  7.219e-02  -0.788   0.4306    
## stat109      1.218e-02  7.240e-02   0.168   0.8664    
## stat110     -9.773e-01  7.196e-02 -13.581  < 2e-16 ***
## stat111      2.472e-02  7.228e-02   0.342   0.7324    
## stat112      2.515e-02  7.266e-02   0.346   0.7293    
## stat113     -4.837e-02  7.262e-02  -0.666   0.5054    
## stat114      2.615e-02  7.268e-02   0.360   0.7190    
## stat115      9.549e-02  7.227e-02   1.321   0.1865    
## stat116      8.705e-02  7.300e-02   1.192   0.2332    
## stat117      4.194e-02  7.252e-02   0.578   0.5631    
## stat118     -7.551e-02  7.181e-02  -1.051   0.2931    
## stat119      5.965e-02  7.214e-02   0.827   0.4083    
## stat120      4.330e-02  7.178e-02   0.603   0.5464    
## stat121     -6.854e-02  7.266e-02  -0.943   0.3456    
## stat122     -4.274e-02  7.199e-02  -0.594   0.5527    
## stat123      3.010e-02  7.302e-02   0.412   0.6802    
## stat124     -3.671e-02  7.194e-02  -0.510   0.6099    
## stat125      6.534e-02  7.260e-02   0.900   0.3682    
## stat126      3.494e-02  7.214e-02   0.484   0.6282    
## stat127      4.698e-03  7.183e-02   0.065   0.9479    
## stat128      3.332e-03  7.196e-02   0.046   0.9631    
## stat129      1.105e-02  7.179e-02   0.154   0.8777    
## stat130      6.539e-02  7.268e-02   0.900   0.3683    
## stat131      1.066e-01  7.252e-02   1.470   0.1417    
## stat132     -7.599e-02  7.196e-02  -1.056   0.2910    
## stat133      2.264e-02  7.223e-02   0.313   0.7540    
## stat134     -9.044e-02  7.202e-02  -1.256   0.2092    
## stat135     -3.221e-02  7.231e-02  -0.445   0.6561    
## stat136      3.977e-04  7.238e-02   0.005   0.9956    
## stat137      2.659e-02  7.184e-02   0.370   0.7113    
## stat138      2.835e-02  7.237e-02   0.392   0.6952    
## stat139      3.644e-03  7.244e-02   0.050   0.9599    
## stat140     -7.415e-02  7.198e-02  -1.030   0.3030    
## stat141      5.894e-02  7.170e-02   0.822   0.4111    
## stat142      1.641e-02  7.277e-02   0.225   0.8216    
## stat143      4.762e-02  7.220e-02   0.660   0.5096    
## stat144      1.126e-01  7.153e-02   1.574   0.1156    
## stat145      2.442e-02  7.318e-02   0.334   0.7386    
## stat146     -6.109e-02  7.261e-02  -0.841   0.4002    
## stat147     -7.860e-02  7.304e-02  -1.076   0.2819    
## stat148     -1.082e-01  7.127e-02  -1.519   0.1289    
## stat149     -1.342e-01  7.239e-02  -1.853   0.0639 .  
## stat150      1.945e-02  7.257e-02   0.268   0.7887    
## stat151     -6.393e-02  7.344e-02  -0.870   0.3841    
## stat152     -7.665e-02  7.227e-02  -1.061   0.2889    
## stat153      6.845e-02  7.330e-02   0.934   0.3504    
## stat154     -6.556e-02  7.284e-02  -0.900   0.3681    
## stat155     -5.677e-02  7.203e-02  -0.788   0.4306    
## stat156      1.738e-01  7.262e-02   2.393   0.0167 *  
## stat157      1.333e-02  7.198e-02   0.185   0.8531    
## stat158     -8.631e-02  7.315e-02  -1.180   0.2381    
## stat159     -3.835e-02  7.183e-02  -0.534   0.5934    
## stat160     -5.616e-03  7.269e-02  -0.077   0.9384    
## stat161      1.076e-01  7.276e-02   1.479   0.1392    
## stat162     -3.696e-02  7.173e-02  -0.515   0.6064    
## stat163      1.492e-02  7.321e-02   0.204   0.8386    
## stat164      7.082e-02  7.238e-02   0.978   0.3279    
## stat165     -4.572e-02  7.190e-02  -0.636   0.5249    
## stat166     -7.083e-02  7.161e-02  -0.989   0.3227    
## stat167     -8.499e-02  7.202e-02  -1.180   0.2380    
## stat168     -2.496e-02  7.239e-02  -0.345   0.7302    
## stat169     -5.353e-02  7.254e-02  -0.738   0.4606    
## stat170     -4.438e-02  7.215e-02  -0.615   0.5386    
## stat171     -1.012e-02  7.268e-02  -0.139   0.8893    
## stat172      9.468e-02  7.230e-02   1.310   0.1904    
## stat173     -1.161e-02  7.231e-02  -0.161   0.8725    
## stat174      4.847e-03  7.213e-02   0.067   0.9464    
## stat175     -6.252e-02  7.275e-02  -0.859   0.3901    
## stat176     -5.332e-02  7.206e-02  -0.740   0.4593    
## stat177     -4.888e-02  7.276e-02  -0.672   0.5018    
## stat178      1.334e-02  7.348e-02   0.181   0.8560    
## stat179     -2.624e-02  7.165e-02  -0.366   0.7142    
## stat180     -1.097e-01  7.169e-02  -1.531   0.1259    
## stat181      6.825e-02  7.242e-02   0.942   0.3460    
## stat182      5.553e-02  7.227e-02   0.768   0.4423    
## stat183      1.354e-02  7.213e-02   0.188   0.8511    
## stat184      1.788e-02  7.277e-02   0.246   0.8060    
## stat185     -1.339e-02  7.144e-02  -0.187   0.8514    
## stat186      1.006e-02  7.270e-02   0.138   0.8899    
## stat187     -1.371e-01  7.198e-02  -1.905   0.0569 .  
## stat188     -1.827e-02  7.190e-02  -0.254   0.7994    
## stat189      2.075e-02  7.234e-02   0.287   0.7742    
## stat190      3.291e-02  7.161e-02   0.460   0.6458    
## stat191     -1.161e-01  7.226e-02  -1.607   0.1081    
## stat192      2.780e-02  7.307e-02   0.380   0.7036    
## stat193     -6.326e-02  7.287e-02  -0.868   0.3853    
## stat194      1.191e-03  7.215e-02   0.017   0.9868    
## stat195      1.032e-01  7.264e-02   1.421   0.1555    
## stat196      3.484e-02  7.300e-02   0.477   0.6332    
## stat197     -2.635e-02  7.139e-02  -0.369   0.7120    
## stat198     -8.211e-02  7.219e-02  -1.137   0.2554    
## stat199      1.566e-02  7.147e-02   0.219   0.8266    
## stat200     -1.121e-01  7.132e-02  -1.572   0.1160    
## stat201     -1.046e-02  7.196e-02  -0.145   0.8844    
## stat202     -3.204e-02  7.317e-02  -0.438   0.6615    
## stat203      3.601e-02  7.228e-02   0.498   0.6183    
## stat204     -1.459e-01  7.237e-02  -2.016   0.0438 *  
## stat205     -1.071e-01  7.212e-02  -1.486   0.1374    
## stat206     -7.380e-02  7.289e-02  -1.012   0.3114    
## stat207      6.258e-02  7.278e-02   0.860   0.3899    
## stat208      2.710e-03  7.241e-02   0.037   0.9701    
## stat209     -6.125e-03  7.185e-02  -0.085   0.9321    
## stat210     -4.706e-02  7.277e-02  -0.647   0.5178    
## stat211     -8.584e-02  7.216e-02  -1.190   0.2342    
## stat212      5.294e-02  7.245e-02   0.731   0.4650    
## stat213     -6.166e-02  7.230e-02  -0.853   0.3937    
## stat214     -1.186e-01  7.224e-02  -1.642   0.1006    
## stat215     -4.013e-02  7.257e-02  -0.553   0.5803    
## stat216     -2.120e-02  7.221e-02  -0.294   0.7690    
## stat217      7.815e-02  7.247e-02   1.078   0.2809    
## sqrt.x18     7.569e+00  2.760e-01  27.428  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 9.501 on 5761 degrees of freedom
## Multiple R-squared:  0.2523, Adjusted R-squared:  0.2211 
## F-statistic: 8.099 on 240 and 5761 DF,  p-value: < 2.2e-16
cd.full = plot.diagnostics(model.full, data.train)

## [1] "Number of data points that have Cook's D > 4/n: 288"
## [1] "Number of data points that have Cook's D > 1: 0"

Checking with removal of high influence points

# Flag observations whose Cook's distance exceeds the conventional 4/n cutoff,
# drop them from the training data, and refit the full model for comparison
cd.cutoff = 4 / nrow(data.train)
high.cd = names(cd.full[cd.full > cd.cutoff])
keep.rows = !(rownames(data.train) %in% high.cd)
data.train2 = data.train[keep.rows, ]
model.full2 = lm(formula = formula, data = data.train2)
summary(model.full2)
## 
## Call:
## lm(formula = formula, data = data.train2)
## 
## Residuals:
##     Min      1Q  Median      3Q     Max 
## -17.944  -5.110  -1.018   4.491  21.381 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  8.679e+01  2.191e+00  39.603  < 2e-16 ***
## x1          -1.500e-01  1.503e-01  -0.998 0.318311    
## x2           8.684e-02  9.609e-02   0.904 0.366197    
## x3           1.217e-02  2.627e-02   0.463 0.643028    
## x4          -1.550e-02  2.080e-03  -7.454 1.04e-13 ***
## x5           1.173e-01  6.814e-02   1.721 0.085289 .  
## x6          -1.390e-01  1.374e-01  -1.011 0.311873    
## x7           3.459e+00  1.454e-01  23.788  < 2e-16 ***
## x8           1.427e-01  3.401e-02   4.196 2.76e-05 ***
## x9           9.024e-01  7.598e-02  11.877  < 2e-16 ***
## x10          4.758e-01  7.070e-02   6.730 1.87e-11 ***
## x11          5.413e+07  1.699e+07   3.187 0.001447 ** 
## x12          1.936e-02  4.311e-02   0.449 0.653403    
## x13          3.512e-02  1.732e-02   2.028 0.042583 *  
## x14         -8.882e-02  7.443e-02  -1.193 0.232793    
## x15          4.050e-02  7.107e-02   0.570 0.568755    
## x16          2.936e-01  4.893e-02   6.001 2.09e-09 ***
## x17          4.104e-01  7.448e-02   5.510 3.75e-08 ***
## x19          2.934e-02  3.814e-02   0.769 0.441719    
## x20         -2.082e-01  2.641e-01  -0.788 0.430444    
## x21          2.900e-02  9.742e-03   2.977 0.002923 ** 
## x22         -1.184e-01  7.959e-02  -1.488 0.136889    
## x23          2.442e-02  7.581e-02   0.322 0.747351    
## stat1       -1.024e-01  5.711e-02  -1.793 0.073032 .  
## stat2        6.227e-02  5.685e-02   1.095 0.273465    
## stat3        1.285e-01  5.712e-02   2.249 0.024554 *  
## stat4       -1.645e-01  5.707e-02  -2.882 0.003966 ** 
## stat5       -4.289e-02  5.762e-02  -0.744 0.456722    
## stat6       -1.465e-01  5.714e-02  -2.564 0.010368 *  
## stat7       -3.387e-02  5.691e-02  -0.595 0.551790    
## stat8       -3.905e-02  5.696e-02  -0.685 0.493066    
## stat9       -2.027e-02  5.708e-02  -0.355 0.722530    
## stat10      -1.196e-01  5.701e-02  -2.098 0.035920 *  
## stat11      -8.443e-02  5.767e-02  -1.464 0.143265    
## stat12       2.330e-02  5.649e-02   0.412 0.680022    
## stat13      -9.209e-02  5.695e-02  -1.617 0.105923    
## stat14      -2.537e-01  5.668e-02  -4.475 7.78e-06 ***
## stat15      -1.258e-01  5.666e-02  -2.220 0.026442 *  
## stat16      -6.163e-02  5.674e-02  -1.086 0.277438    
## stat17       4.362e-03  5.666e-02   0.077 0.938630    
## stat18      -5.829e-02  5.673e-02  -1.027 0.304288    
## stat19       5.289e-02  5.675e-02   0.932 0.351405    
## stat20       2.106e-02  5.695e-02   0.370 0.711525    
## stat21      -6.171e-02  5.741e-02  -1.075 0.282452    
## stat22      -9.004e-02  5.693e-02  -1.581 0.113822    
## stat23       1.550e-01  5.707e-02   2.716 0.006635 ** 
## stat24      -1.222e-01  5.721e-02  -2.136 0.032713 *  
## stat25      -7.666e-02  5.709e-02  -1.343 0.179456    
## stat26      -1.008e-01  5.677e-02  -1.775 0.075940 .  
## stat27      -4.608e-02  5.733e-02  -0.804 0.421553    
## stat28       1.798e-03  5.720e-02   0.031 0.974929    
## stat29       5.082e-02  5.722e-02   0.888 0.374425    
## stat30       9.406e-03  5.754e-02   0.163 0.870148    
## stat31       5.524e-03  5.745e-02   0.096 0.923402    
## stat32       4.145e-03  5.763e-02   0.072 0.942654    
## stat33      -1.700e-01  5.697e-02  -2.985 0.002853 ** 
## stat34       3.675e-02  5.713e-02   0.643 0.520095    
## stat35      -1.555e-01  5.725e-02  -2.715 0.006640 ** 
## stat36       1.171e-03  5.669e-02   0.021 0.983520    
## stat37      -9.930e-02  5.763e-02  -1.723 0.084920 .  
## stat38       1.825e-01  5.715e-02   3.193 0.001415 ** 
## stat39      -1.346e-01  5.682e-02  -2.369 0.017871 *  
## stat40       1.390e-02  5.695e-02   0.244 0.807196    
## stat41      -1.537e-01  5.627e-02  -2.732 0.006322 ** 
## stat42      -7.797e-02  5.697e-02  -1.369 0.171192    
## stat43      -8.180e-02  5.718e-02  -1.431 0.152590    
## stat44       2.924e-02  5.682e-02   0.515 0.606841    
## stat45      -7.597e-02  5.723e-02  -1.328 0.184389    
## stat46       3.753e-02  5.767e-02   0.651 0.515295    
## stat47       9.743e-02  5.738e-02   1.698 0.089580 .  
## stat48      -7.181e-03  5.696e-02  -0.126 0.899685    
## stat49       8.503e-03  5.666e-02   0.150 0.880709    
## stat50       1.242e-01  5.673e-02   2.190 0.028555 *  
## stat51       5.810e-02  5.682e-02   1.022 0.306601    
## stat52      -1.531e-02  5.717e-02  -0.268 0.788942    
## stat53      -1.343e-02  5.745e-02  -0.234 0.815225    
## stat54      -8.824e-02  5.742e-02  -1.537 0.124395    
## stat55       1.427e-02  5.658e-02   0.252 0.800900    
## stat56       5.096e-02  5.724e-02   0.890 0.373387    
## stat57      -1.108e-02  5.684e-02  -0.195 0.845486    
## stat58       2.446e-03  5.647e-02   0.043 0.965454    
## stat59       6.050e-02  5.681e-02   1.065 0.286969    
## stat60       1.087e-01  5.713e-02   1.903 0.057148 .  
## stat61      -3.057e-02  5.690e-02  -0.537 0.591049    
## stat62      -8.544e-02  5.665e-02  -1.508 0.131562    
## stat63       8.721e-02  5.714e-02   1.526 0.126998    
## stat64       1.802e-02  5.647e-02   0.319 0.749650    
## stat65      -7.029e-02  5.729e-02  -1.227 0.219903    
## stat66       6.766e-02  5.787e-02   1.169 0.242328    
## stat67       5.983e-02  5.743e-02   1.042 0.297548    
## stat68      -4.282e-02  5.706e-02  -0.750 0.453030    
## stat69      -1.662e-02  5.669e-02  -0.293 0.769344    
## stat70       5.096e-02  5.681e-02   0.897 0.369770    
## stat71       4.276e-02  5.691e-02   0.751 0.452461    
## stat72       5.734e-02  5.744e-02   0.998 0.318196    
## stat73       8.509e-02  5.731e-02   1.485 0.137688    
## stat74       1.927e-02  5.724e-02   0.337 0.736400    
## stat75       1.267e-02  5.743e-02   0.221 0.825446    
## stat76      -7.028e-03  5.700e-02  -0.123 0.901876    
## stat77       5.681e-03  5.715e-02   0.099 0.920825    
## stat78      -1.058e-01  5.708e-02  -1.854 0.063750 .  
## stat79       6.197e-02  5.695e-02   1.088 0.276538    
## stat80       7.615e-02  5.717e-02   1.332 0.182900    
## stat81       6.029e-02  5.727e-02   1.053 0.292453    
## stat82       2.419e-02  5.712e-02   0.424 0.671871    
## stat83      -2.493e-03  5.706e-02  -0.044 0.965151    
## stat84      -9.260e-02  5.708e-02  -1.622 0.104800    
## stat85      -5.591e-02  5.714e-02  -0.978 0.327905    
## stat86       3.508e-02  5.730e-02   0.612 0.540409    
## stat87      -5.425e-02  5.714e-02  -0.949 0.342469    
## stat88      -1.029e-02  5.693e-02  -0.181 0.856543    
## stat89      -4.040e-02  5.697e-02  -0.709 0.478219    
## stat90      -5.604e-02  5.716e-02  -0.980 0.326991    
## stat91      -8.406e-02  5.663e-02  -1.484 0.137759    
## stat92      -9.160e-02  5.685e-02  -1.611 0.107164    
## stat93      -2.111e-02  5.794e-02  -0.364 0.715625    
## stat94       2.765e-02  5.732e-02   0.482 0.629516    
## stat95       8.477e-02  5.691e-02   1.490 0.136411    
## stat96      -3.339e-02  5.693e-02  -0.586 0.557597    
## stat97      -1.299e-02  5.659e-02  -0.229 0.818504    
## stat98       9.648e-01  5.631e-02  17.133  < 2e-16 ***
## stat99       5.991e-02  5.725e-02   1.046 0.295393    
## stat100      1.826e-01  5.718e-02   3.194 0.001410 ** 
## stat101     -3.588e-02  5.769e-02  -0.622 0.534033    
## stat102      2.298e-02  5.729e-02   0.401 0.688429    
## stat103     -7.130e-02  5.754e-02  -1.239 0.215291    
## stat104     -1.423e-02  5.695e-02  -0.250 0.802730    
## stat105      8.695e-02  5.661e-02   1.536 0.124648    
## stat106     -1.086e-01  5.685e-02  -1.910 0.056131 .  
## stat107     -5.293e-02  5.720e-02  -0.925 0.354886    
## stat108     -1.829e-02  5.711e-02  -0.320 0.748853    
## stat109     -3.427e-02  5.723e-02  -0.599 0.549301    
## stat110     -9.074e-01  5.671e-02 -16.001  < 2e-16 ***
## stat111      4.466e-02  5.698e-02   0.784 0.433211    
## stat112      1.395e-02  5.746e-02   0.243 0.808223    
## stat113      9.215e-03  5.746e-02   0.160 0.872598    
## stat114      4.348e-02  5.750e-02   0.756 0.449572    
## stat115      1.141e-01  5.710e-02   1.998 0.045818 *  
## stat116      8.528e-02  5.767e-02   1.479 0.139225    
## stat117      7.156e-02  5.714e-02   1.252 0.210503    
## stat118      1.547e-02  5.668e-02   0.273 0.784843    
## stat119      1.061e-01  5.686e-02   1.867 0.062020 .  
## stat120      2.504e-02  5.662e-02   0.442 0.658382    
## stat121     -3.080e-02  5.738e-02  -0.537 0.591473    
## stat122     -6.132e-02  5.685e-02  -1.079 0.280775    
## stat123      9.642e-02  5.759e-02   1.674 0.094143 .  
## stat124     -4.151e-02  5.679e-02  -0.731 0.464897    
## stat125      1.286e-02  5.736e-02   0.224 0.822595    
## stat126     -6.164e-03  5.699e-02  -0.108 0.913879    
## stat127     -3.064e-02  5.671e-02  -0.540 0.589048    
## stat128     -4.420e-02  5.675e-02  -0.779 0.436111    
## stat129      1.930e-03  5.655e-02   0.034 0.972775    
## stat130      4.099e-02  5.740e-02   0.714 0.475217    
## stat131      2.397e-02  5.714e-02   0.419 0.674917    
## stat132     -8.747e-02  5.677e-02  -1.541 0.123431    
## stat133      6.462e-02  5.719e-02   1.130 0.258578    
## stat134     -6.097e-02  5.682e-02  -1.073 0.283366    
## stat135     -2.843e-02  5.712e-02  -0.498 0.618685    
## stat136     -1.254e-02  5.706e-02  -0.220 0.826064    
## stat137      8.410e-02  5.666e-02   1.484 0.137810    
## stat138      4.378e-02  5.719e-02   0.766 0.443952    
## stat139     -6.446e-03  5.720e-02  -0.113 0.910274    
## stat140     -6.005e-02  5.672e-02  -1.059 0.289810    
## stat141      8.710e-02  5.661e-02   1.539 0.123942    
## stat142      2.936e-02  5.746e-02   0.511 0.609369    
## stat143     -5.725e-03  5.705e-02  -0.100 0.920066    
## stat144      1.298e-01  5.639e-02   2.303 0.021339 *  
## stat145     -2.276e-02  5.789e-02  -0.393 0.694209    
## stat146     -9.056e-02  5.732e-02  -1.580 0.114201    
## stat147     -6.916e-02  5.772e-02  -1.198 0.230932    
## stat148     -1.046e-01  5.626e-02  -1.859 0.063142 .  
## stat149     -1.828e-01  5.722e-02  -3.195 0.001406 ** 
## stat150     -2.510e-02  5.741e-02  -0.437 0.661991    
## stat151      9.972e-03  5.813e-02   0.172 0.863806    
## stat152     -5.857e-02  5.698e-02  -1.028 0.304071    
## stat153      7.057e-02  5.787e-02   1.220 0.222683    
## stat154      5.373e-03  5.761e-02   0.093 0.925696    
## stat155     -8.870e-03  5.690e-02  -0.156 0.876122    
## stat156      1.945e-01  5.726e-02   3.396 0.000688 ***
## stat157      2.870e-02  5.678e-02   0.505 0.613251    
## stat158     -4.424e-04  5.771e-02  -0.008 0.993884    
## stat159      1.379e-02  5.679e-02   0.243 0.808121    
## stat160      4.656e-03  5.748e-02   0.081 0.935444    
## stat161      5.455e-02  5.745e-02   0.949 0.342461    
## stat162     -5.137e-02  5.653e-02  -0.909 0.363503    
## stat163      1.131e-02  5.784e-02   0.196 0.844967    
## stat164      2.228e-02  5.725e-02   0.389 0.697192    
## stat165     -3.964e-02  5.679e-02  -0.698 0.485139    
## stat166     -6.409e-02  5.647e-02  -1.135 0.256490    
## stat167     -1.055e-01  5.682e-02  -1.856 0.063511 .  
## stat168     -3.610e-02  5.706e-02  -0.633 0.527035    
## stat169     -6.499e-02  5.744e-02  -1.131 0.257958    
## stat170     -8.522e-03  5.696e-02  -0.150 0.881064    
## stat171     -4.750e-02  5.746e-02  -0.827 0.408421    
## stat172      1.661e-01  5.694e-02   2.918 0.003541 ** 
## stat173      1.254e-02  5.706e-02   0.220 0.826084    
## stat174      7.481e-02  5.692e-02   1.314 0.188753    
## stat175     -4.793e-02  5.739e-02  -0.835 0.403700    
## stat176     -1.166e-01  5.687e-02  -2.051 0.040323 *  
## stat177     -9.202e-02  5.741e-02  -1.603 0.109035    
## stat178      2.773e-02  5.800e-02   0.478 0.632592    
## stat179     -5.187e-02  5.651e-02  -0.918 0.358641    
## stat180     -6.484e-02  5.670e-02  -1.144 0.252839    
## stat181      9.516e-02  5.712e-02   1.666 0.095791 .  
## stat182      9.005e-02  5.713e-02   1.576 0.115022    
## stat183      3.572e-03  5.710e-02   0.063 0.950117    
## stat184      9.258e-02  5.750e-02   1.610 0.107438    
## stat185      2.740e-03  5.644e-02   0.049 0.961287    
## stat186      7.015e-02  5.752e-02   1.219 0.222716    
## stat187     -6.206e-02  5.682e-02  -1.092 0.274752    
## stat188     -2.156e-02  5.674e-02  -0.380 0.703942    
## stat189     -1.877e-02  5.726e-02  -0.328 0.743088    
## stat190     -1.223e-02  5.661e-02  -0.216 0.829015    
## stat191     -9.372e-02  5.689e-02  -1.647 0.099565 .  
## stat192      3.588e-02  5.787e-02   0.620 0.535303    
## stat193      1.610e-02  5.759e-02   0.280 0.779806    
## stat194     -2.569e-02  5.703e-02  -0.450 0.652402    
## stat195      2.683e-02  5.743e-02   0.467 0.640376    
## stat196     -6.189e-03  5.762e-02  -0.107 0.914465    
## stat197     -5.689e-02  5.644e-02  -1.008 0.313517    
## stat198     -5.718e-02  5.698e-02  -1.004 0.315621    
## stat199      3.639e-02  5.647e-02   0.644 0.519319    
## stat200     -7.932e-02  5.641e-02  -1.406 0.159729    
## stat201      3.153e-02  5.693e-02   0.554 0.579654    
## stat202      8.662e-03  5.779e-02   0.150 0.880849    
## stat203      3.070e-02  5.705e-02   0.538 0.590435    
## stat204     -5.890e-02  5.714e-02  -1.031 0.302714    
## stat205      3.088e-03  5.678e-02   0.054 0.956630    
## stat206     -9.447e-02  5.747e-02  -1.644 0.100287    
## stat207      2.613e-02  5.754e-02   0.454 0.649747    
## stat208      6.176e-02  5.734e-02   1.077 0.281518    
## stat209      1.311e-02  5.668e-02   0.231 0.817100    
## stat210     -1.033e-01  5.742e-02  -1.799 0.072074 .  
## stat211     -5.829e-02  5.702e-02  -1.022 0.306634    
## stat212      5.304e-02  5.722e-02   0.927 0.353958    
## stat213     -7.850e-02  5.706e-02  -1.376 0.168926    
## stat214     -7.612e-02  5.708e-02  -1.334 0.182400    
## stat215     -3.828e-02  5.734e-02  -0.668 0.504435    
## stat216     -4.636e-02  5.698e-02  -0.814 0.415877    
## stat217      2.588e-02  5.713e-02   0.453 0.650605    
## sqrt.x18     7.391e+00  2.170e-01  34.054  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 7.312 on 5473 degrees of freedom
## Multiple R-squared:  0.3573, Adjusted R-squared:  0.3291 
## F-statistic: 12.68 on 240 and 5473 DF,  p-value: < 2.2e-16
cd.full2 = plot.diagnostics(model.full2, data.train2)

## [1] "Number of data points that have Cook's D > 4/n: 321"
## [1] "Number of data points that have Cook's D > 1: 0"
# Much more normal residuals than before.
# Checking whether the high-influence and normal subsets differ and, if so,
# which variables drive the difference.
# High-leverage plot: label each training row by whether it was flagged as
# high Cook's D, keeping only the label and the target variable
plotData = data.train %>% 
  rownames_to_column() %>%
  mutate(type=ifelse(rowname %in% high.cd,'High','Normal')) %>%
  dplyr::select(type,target=one_of(label.names))

# Compare the target distribution between high-influence and normal rows
ggplot(data=plotData, aes(x=type,y=target)) +
  geom_boxplot(fill='light blue',outlier.shape=NA) +  # suppress outlier dots; boxes only
  scale_y_continuous(name="Target Variable Values") +
  theme_light() +
  ggtitle('Distribution of High Leverage Points and Normal  Points')

# Rebuild plotData with all features (instead of the target) for per-feature
# comparisons between the high-influence and normal groups
plotData = data.train %>% 
  rownames_to_column() %>%
  mutate(type=ifelse(rowname %in% high.cd,'High','Normal')) %>%
  dplyr::select(type,one_of(feature.names))
# Two-sample pooled-variance t-tests, one per feature: does the feature's
# mean differ between the High and Normal groups?
comp.test = lapply(dplyr::select(plotData, one_of(feature.names)), function(x) t.test(x ~ plotData$type, var.equal = TRUE)) 

# Keep only the features whose group means differ at the 5% level
# (list.filter is from the rlist package) and display their p-values
sig.comp = list.filter(comp.test, p.value < 0.05)
sapply(sig.comp, `[[`, 'p.value')
##       stat64       stat67       stat82       stat93       stat98 
## 3.437616e-02 4.380049e-02 7.575191e-03 1.885805e-02 3.609357e-05 
##      stat110      stat146      stat214     sqrt.x18 
## 4.617328e-04 3.659329e-02 3.341761e-02 2.352371e-03
# Distribution (box) plots: melt to long form and draw one facet per feature,
# with free scales so each feature uses its own axis range
mm = melt(plotData, id=c('type'))

ggplot(mm) +
  geom_boxplot(aes(x=type, y=value))+
  facet_wrap(~variable, ncol=10, scales = 'free') +
  ggtitle('Distribution of High Leverage Points and Normal Points')

# Very tall canvas so the hundreds of facets stay readable; limitsize must be
# disabled for ggsave to accept dimensions this large
ggsave('comparison.jpeg', width =50, height = 400, units='cm',limitsize = FALSE)

Grand Means Model

# Intercept-only (grand mean) baselines on both training sets; these anchor
# the lower end of the stepwise search scopes below
model.null = lm(grand.mean.formula, data = data.train)
model.null2 = lm(grand.mean.formula, data = data.train2)

Variable Selection

Basic: http://www.stat.columbia.edu/~martin/W2024/R10.pdf Cross Validation + Other Metrics: http://www.sthda.com/english/articles/37-model-selection-essentials-in-r/154-stepwise-regression-essentials-in-r/

Forward Selection (w/ full train)

Train

# Forward selection from the grand-mean model up to the full model (AIC-based),
# gated by the algo.forward report parameter
if (isTRUE(algo.forward)) {
  t1 = Sys.time()

  model.forward = step(model.null, scope=list(lower=model.null, upper=model.full), direction="forward", trace = 0)
  print(summary(model.forward))

  t2 = Sys.time()
  # format() keeps the difftime units (secs/mins), which bare paste() drops
  print(paste0("Time taken for Forward Selection: ", format(t2 - t1)))

  plot.diagnostics(model.forward, data.train)
}

Test

# Evaluate the forward-selected model on the held-out test set
if (algo.forward) {
  test.model(model.forward, data.test, "Forward Selection")
}

Forward Selection (w/ filtered train)

Train

# Forward selection on the influence-filtered training set (data.train2),
# using the matching null/full models as the search scope
if (isTRUE(algo.forward)) {
  t1 = Sys.time()

  model.forward2 = step(model.null2, scope=list(lower=model.null2, upper=model.full2), direction="forward", trace = 0)
  print(summary(model.forward2))

  t2 = Sys.time()
  # format() keeps the difftime units (secs/mins), which bare paste() drops
  print(paste0("Time taken for Forward Selection: ", format(t2 - t1)))

  plot.diagnostics(model.forward2, data.train2)
}

Test

# Evaluate the filtered-train forward-selected model on the same test set
if (algo.forward) {
  test.model(model.forward2, data.test, "Forward Selection (2)")
}

Forward Selection with CV (w/ full train)

Train

# Forward selection via caret (leapForward) with cross-validation;
# train.caret.glmselect is a project helper returning the fitted model and
# the ids of the selected features. Seed fixed for reproducible CV folds.
if (algo.forward.caret) {
  set.seed(1)
  returned = train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapForward",
    feature.names = feature.names
  )
  id = returned$id
  model.forward = returned$model
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 9 on full training set
##     nvmax      RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 10.210693 0.1018765 7.806589 0.3486064 0.02595312 0.1962709
## 2       2  9.979515 0.1418363 7.597398 0.3207096 0.02875352 0.1777050
## 3       3  9.814897 0.1693658 7.438338 0.3290643 0.02785753 0.1807549
## 4       4  9.667493 0.1938902 7.231990 0.3201540 0.02682717 0.1803033
## 5       5  9.588875 0.2068466 7.172167 0.3419604 0.02484619 0.1975346
## 6       6  9.591660 0.2066208 7.174795 0.3521685 0.02739708 0.2020679
## 7       7  9.571737 0.2099116 7.168707 0.3525200 0.02833739 0.2076171
## 8       8  9.546348 0.2140867 7.150908 0.3571826 0.02810121 0.2058362
## 9       9  9.521497 0.2180754 7.135342 0.3515932 0.02753943 0.1919503
## 10     10  9.531634 0.2164981 7.142145 0.3502059 0.02798899 0.1947027
## 11     11  9.531499 0.2165176 7.142129 0.3493156 0.02735269 0.1916070
## 12     12  9.541275 0.2149978 7.148145 0.3511383 0.02743081 0.1879036
## 13     13  9.539867 0.2151993 7.145155 0.3456114 0.02644858 0.1873780
## 14     14  9.532611 0.2163848 7.137358 0.3470188 0.02761253 0.1915646
## 15     15  9.525866 0.2174740 7.126506 0.3478493 0.02822187 0.1955673
## 16     16  9.532841 0.2163902 7.129214 0.3525212 0.02855943 0.1972010
## 17     17  9.532361 0.2164472 7.126063 0.3482723 0.02779395 0.1925486
## 18     18  9.535834 0.2159596 7.133904 0.3474052 0.02771944 0.1957735
## 19     19  9.535883 0.2159502 7.135435 0.3475181 0.02829213 0.1958929
## 20     20  9.537108 0.2157991 7.133816 0.3489418 0.02882166 0.1925465
## 21     21  9.539986 0.2153852 7.134572 0.3543507 0.02926639 0.1906264
## 22     22  9.545096 0.2146096 7.139438 0.3574050 0.02982504 0.1984879
## 23     23  9.548369 0.2140656 7.141456 0.3559449 0.02965578 0.1939936
## 24     24  9.556574 0.2127700 7.149307 0.3553712 0.02886227 0.1889438
## 25     25  9.560161 0.2122506 7.154158 0.3584403 0.02911492 0.1868420
## 26     26  9.567531 0.2110771 7.161210 0.3559550 0.02891696 0.1891417
## 27     27  9.568763 0.2108701 7.163947 0.3543281 0.02851222 0.1878238
## 28     28  9.578408 0.2094001 7.166077 0.3544061 0.02808319 0.1866707
## 29     29  9.584943 0.2084431 7.172753 0.3576149 0.02858654 0.1893383
## 30     30  9.590321 0.2076733 7.173134 0.3634458 0.02876178 0.1965202
## 31     31  9.596942 0.2066920 7.176744 0.3634694 0.02889365 0.1994013
## 32     32  9.602009 0.2059721 7.183235 0.3663653 0.02903096 0.2058716
## 33     33  9.605377 0.2054437 7.188016 0.3651406 0.02880960 0.2101985
## 34     34  9.608019 0.2050839 7.189678 0.3650497 0.02875688 0.2131259
## 35     35  9.609675 0.2048133 7.190762 0.3564514 0.02785446 0.2072426
## 36     36  9.613063 0.2043122 7.194013 0.3567571 0.02771453 0.2094076
## 37     37  9.619848 0.2032673 7.201187 0.3571364 0.02729727 0.2085368
## 38     38  9.625883 0.2023755 7.204602 0.3537499 0.02711129 0.2025209
## 39     39  9.628373 0.2020049 7.207561 0.3513626 0.02665465 0.2057779
## 40     40  9.632131 0.2014506 7.209335 0.3514043 0.02691156 0.2068670
## 41     41  9.632626 0.2013750 7.208457 0.3522246 0.02684180 0.2095138
## 42     42  9.635143 0.2010236 7.211166 0.3557478 0.02693283 0.2143238
## 43     43  9.639155 0.2004534 7.215106 0.3590516 0.02719877 0.2117256
## 44     44  9.644516 0.1996461 7.220196 0.3569259 0.02692388 0.2083983
## 45     45  9.646351 0.1994383 7.222689 0.3562603 0.02736089 0.2107924
## 46     46  9.650263 0.1988711 7.223454 0.3575405 0.02759603 0.2141574
## 47     47  9.653936 0.1983459 7.225812 0.3552781 0.02769683 0.2116223
## 48     48  9.656438 0.1980132 7.227800 0.3572046 0.02837399 0.2111106
## 49     49  9.661297 0.1972873 7.230177 0.3584694 0.02841858 0.2156499
## 50     50  9.661678 0.1973108 7.232656 0.3610208 0.02896234 0.2197475
## 51     51  9.663216 0.1970700 7.235766 0.3605234 0.02895781 0.2239816
## 52     52  9.666690 0.1965300 7.237825 0.3565754 0.02875953 0.2189952
## 53     53  9.669841 0.1960224 7.240503 0.3578076 0.02827219 0.2172610
## 54     54  9.672332 0.1957025 7.243661 0.3596879 0.02840620 0.2167286
## 55     55  9.676210 0.1951180 7.247522 0.3572600 0.02808822 0.2125986
## 56     56  9.678724 0.1947529 7.251337 0.3586957 0.02810280 0.2174194
## 57     57  9.679344 0.1947004 7.250862 0.3571636 0.02776301 0.2168266
## 58     58  9.683005 0.1941772 7.252193 0.3574051 0.02749202 0.2160766
## 59     59  9.683791 0.1940636 7.254804 0.3552678 0.02764702 0.2123418
## 60     60  9.681709 0.1943979 7.251209 0.3552790 0.02784421 0.2102172
## 61     61  9.684873 0.1939576 7.254581 0.3555684 0.02816321 0.2117349
## 62     62  9.684143 0.1940724 7.256817 0.3558320 0.02767953 0.2098835
## 63     63  9.687365 0.1936444 7.257991 0.3549303 0.02746551 0.2078838
## 64     64  9.688831 0.1934956 7.259952 0.3578128 0.02808321 0.2105320
## 65     65  9.688023 0.1935787 7.258746 0.3574163 0.02782399 0.2126790
## 66     66  9.689957 0.1933137 7.259252 0.3580896 0.02741208 0.2133409
## 67     67  9.689771 0.1933961 7.257998 0.3602286 0.02771117 0.2145943
## 68     68  9.691509 0.1931944 7.257951 0.3592407 0.02790230 0.2111481
## 69     69  9.691488 0.1932008 7.257627 0.3559450 0.02750716 0.2088413
## 70     70  9.689064 0.1935861 7.256186 0.3555813 0.02770565 0.2073898
## 71     71  9.689612 0.1935533 7.255781 0.3589698 0.02827682 0.2091546
## 72     72  9.689917 0.1935373 7.255957 0.3607968 0.02810326 0.2095896
## 73     73  9.688082 0.1938077 7.256937 0.3613263 0.02790771 0.2093251
## 74     74  9.689396 0.1935915 7.258453 0.3586836 0.02756373 0.2062293
## 75     75  9.688879 0.1937038 7.258113 0.3549547 0.02716287 0.2022783
## 76     76  9.691189 0.1933690 7.259718 0.3527558 0.02700823 0.1987957
## 77     77  9.691443 0.1933204 7.259059 0.3515241 0.02717847 0.1994354
## 78     78  9.687462 0.1939407 7.256122 0.3523742 0.02742099 0.2030728
## 79     79  9.686008 0.1941786 7.255603 0.3533218 0.02734732 0.2044142
## 80     80  9.689984 0.1936037 7.258016 0.3501682 0.02697875 0.2018042
## 81     81  9.690150 0.1936158 7.258779 0.3529217 0.02742064 0.2025384
## 82     82  9.690250 0.1935989 7.257515 0.3518228 0.02743420 0.2008556
## 83     83  9.693437 0.1931468 7.260294 0.3530499 0.02732938 0.2009855
## 84     84  9.693501 0.1931732 7.260473 0.3561392 0.02736843 0.2031897
## 85     85  9.693804 0.1931537 7.262102 0.3551883 0.02739868 0.2048698
## 86     86  9.694236 0.1931039 7.263032 0.3550944 0.02718449 0.2042880
## 87     87  9.692127 0.1934310 7.260169 0.3529923 0.02693648 0.2032702
## 88     88  9.692059 0.1934602 7.261792 0.3559334 0.02753307 0.2063066
## 89     89  9.693638 0.1932626 7.264383 0.3531071 0.02735124 0.2032762
## 90     90  9.695486 0.1929860 7.265482 0.3514273 0.02705292 0.2016088
## 91     91  9.695773 0.1928992 7.266406 0.3507129 0.02674625 0.2022263
## 92     92  9.694417 0.1931147 7.264096 0.3513644 0.02701991 0.2016721
## 93     93  9.697373 0.1927257 7.266673 0.3498407 0.02690686 0.1989564
## 94     94  9.694635 0.1931139 7.266569 0.3509674 0.02692018 0.1994855
## 95     95  9.695091 0.1930696 7.267448 0.3521849 0.02719542 0.2012604
## 96     96  9.695178 0.1930754 7.267516 0.3522821 0.02753624 0.2026018
## 97     97  9.692798 0.1934469 7.264351 0.3512426 0.02757329 0.2021689
## 98     98  9.692407 0.1934860 7.265424 0.3481302 0.02742818 0.2013792
## 99     99  9.691711 0.1936090 7.267589 0.3462840 0.02713372 0.1999290
## 100   100  9.691441 0.1935999 7.266659 0.3447557 0.02691220 0.1991989
## 101   101  9.691664 0.1935443 7.266953 0.3441838 0.02670262 0.1992938
## 102   102  9.694980 0.1930677 7.268472 0.3450903 0.02683975 0.2001116
## 103   103  9.694546 0.1931565 7.267390 0.3470629 0.02687205 0.2026612
## 104   104  9.694703 0.1931350 7.267185 0.3463995 0.02671831 0.2016367
## 105   105  9.694982 0.1931394 7.268128 0.3474113 0.02675444 0.2020220
## 106   106  9.692997 0.1934482 7.265571 0.3483559 0.02700362 0.2022296
## 107   107  9.693339 0.1934192 7.263105 0.3506780 0.02703746 0.2044658
## 108   108  9.694399 0.1932631 7.261911 0.3498684 0.02670262 0.2051578
## 109   109  9.694761 0.1932444 7.262043 0.3490516 0.02653802 0.2064869
## 110   110  9.693563 0.1934402 7.260259 0.3482459 0.02652027 0.2066578
## 111   111  9.695733 0.1931720 7.260721 0.3490042 0.02654383 0.2065935
## 112   112  9.699057 0.1926691 7.265924 0.3444228 0.02612708 0.2019630
## 113   113  9.699108 0.1927028 7.265524 0.3416723 0.02607880 0.2008849
## 114   114  9.700501 0.1925139 7.267376 0.3420668 0.02604139 0.2015627
## 115   115  9.700575 0.1925041 7.267046 0.3401314 0.02583579 0.1991818
## 116   116  9.701421 0.1924063 7.265952 0.3404865 0.02618439 0.2000405
## 117   117  9.701847 0.1923575 7.265523 0.3387378 0.02612060 0.1986849
## 118   118  9.701131 0.1924840 7.265311 0.3394010 0.02611516 0.1978235
## 119   119  9.701957 0.1923897 7.267469 0.3385725 0.02610626 0.1982901
## 120   120  9.698905 0.1928497 7.266861 0.3386928 0.02618241 0.1992798
## 121   121  9.700207 0.1926744 7.266861 0.3403616 0.02607734 0.1989074
## 122   122  9.699802 0.1927391 7.267443 0.3387849 0.02632963 0.1972449
## 123   123  9.698763 0.1928845 7.267285 0.3394941 0.02635248 0.1976528
## 124   124  9.699654 0.1927469 7.269303 0.3378107 0.02602415 0.1950250
## 125   125  9.700044 0.1927113 7.270595 0.3404225 0.02629957 0.1965995
## 126   126  9.698635 0.1929456 7.270336 0.3402247 0.02656200 0.1970726
## 127   127  9.699494 0.1928214 7.271363 0.3376679 0.02627093 0.1946691
## 128   128  9.698565 0.1929329 7.270181 0.3369873 0.02614222 0.1949181
## 129   129  9.698099 0.1930101 7.270023 0.3387016 0.02626693 0.1961883
## 130   130  9.698527 0.1929570 7.269079 0.3389947 0.02624710 0.1948468
## 131   131  9.699954 0.1927590 7.271706 0.3379885 0.02642737 0.1951646
## 132   132  9.702112 0.1924609 7.272163 0.3376784 0.02638499 0.1944853
## 133   133  9.702649 0.1923948 7.272820 0.3388609 0.02656596 0.1958059
## 134   134  9.700675 0.1926849 7.271818 0.3395236 0.02642825 0.1957372
## 135   135  9.700222 0.1927673 7.272235 0.3388912 0.02623423 0.1960871
## 136   136  9.700851 0.1927185 7.272453 0.3390501 0.02633402 0.1961100
## 137   137  9.701029 0.1926953 7.272269 0.3405803 0.02676330 0.1967972
## 138   138  9.701565 0.1926440 7.272729 0.3408626 0.02664153 0.1969086
## 139   139  9.704114 0.1923040 7.274296 0.3406635 0.02672079 0.1965254
## 140   140  9.703699 0.1923487 7.274084 0.3402685 0.02688528 0.1955946
## 141   141  9.702996 0.1924649 7.273427 0.3386508 0.02676303 0.1935418
## 142   142  9.704774 0.1921841 7.274815 0.3362565 0.02656328 0.1919154
## 143   143  9.706357 0.1919710 7.276729 0.3358908 0.02660394 0.1910240
## 144   144  9.704214 0.1922934 7.274991 0.3369830 0.02656617 0.1921628
## 145   145  9.705566 0.1921434 7.275978 0.3371607 0.02682012 0.1913397
## 146   146  9.706269 0.1920389 7.277068 0.3373873 0.02683822 0.1916561
## 147   147  9.705085 0.1922026 7.276425 0.3373515 0.02673055 0.1915187
## 148   148  9.704911 0.1922397 7.276552 0.3373313 0.02678738 0.1924604
## 149   149  9.707071 0.1919650 7.278475 0.3390115 0.02704366 0.1928745
## 150   150  9.706344 0.1920691 7.278001 0.3379772 0.02694983 0.1910947
## 151   151  9.706748 0.1920181 7.277168 0.3387240 0.02703103 0.1901729
## 152   152  9.706324 0.1920741 7.276622 0.3386834 0.02696635 0.1903012
## 153   153  9.704879 0.1922686 7.275758 0.3385577 0.02701028 0.1908511
## 154   154  9.705020 0.1922380 7.276089 0.3385786 0.02691514 0.1910534
## 155   155  9.704600 0.1923028 7.275738 0.3387205 0.02680884 0.1916219
## 156   156  9.704513 0.1923068 7.274916 0.3380627 0.02666329 0.1895684
## 157   157  9.704398 0.1923062 7.273772 0.3386624 0.02666397 0.1910449
## 158   158  9.704606 0.1922715 7.273908 0.3393221 0.02655669 0.1916567
## 159   159  9.706032 0.1920574 7.275359 0.3403004 0.02654980 0.1914877
## 160   160  9.704460 0.1922881 7.273292 0.3414840 0.02665290 0.1923099
## 161   161  9.705080 0.1921998 7.273535 0.3410757 0.02661807 0.1913351
## 162   162  9.704095 0.1923570 7.272853 0.3409219 0.02673132 0.1915933
## 163   163  9.703645 0.1924299 7.272162 0.3405043 0.02667306 0.1910276
## 164   164  9.703163 0.1925060 7.271736 0.3409699 0.02670420 0.1916331
## 165   165  9.703847 0.1924197 7.273117 0.3417612 0.02670882 0.1915095
## 166   166  9.704622 0.1923163 7.274054 0.3408596 0.02664589 0.1915127
## 167   167  9.704096 0.1923814 7.274649 0.3417619 0.02684851 0.1913057
## 168   168  9.703297 0.1925005 7.274374 0.3418504 0.02707423 0.1904948
## 169   169  9.703244 0.1925071 7.274468 0.3421153 0.02700240 0.1905269
## 170   170  9.702367 0.1926487 7.273260 0.3432559 0.02717293 0.1912544
## 171   171  9.702840 0.1925854 7.273152 0.3425182 0.02715790 0.1909568
## 172   172  9.702221 0.1926962 7.272604 0.3428953 0.02719244 0.1913096
## 173   173  9.701577 0.1927818 7.272618 0.3430142 0.02723474 0.1921650
## 174   174  9.700372 0.1929652 7.271121 0.3430442 0.02721216 0.1924483
## 175   175  9.701029 0.1928888 7.272035 0.3436628 0.02739819 0.1932923
## 176   176  9.701902 0.1927679 7.272780 0.3430132 0.02721065 0.1931672
## 177   177  9.701989 0.1927521 7.272228 0.3433882 0.02718574 0.1939876
## 178   178  9.702685 0.1926705 7.272749 0.3460154 0.02742260 0.1961431
## 179   179  9.703002 0.1926303 7.273621 0.3460370 0.02733456 0.1956420
## 180   180  9.703481 0.1925558 7.274308 0.3459908 0.02723305 0.1961563
## 181   181  9.703882 0.1925085 7.274385 0.3458318 0.02729889 0.1960555
## 182   182  9.705298 0.1923037 7.275209 0.3465585 0.02747757 0.1965091
## 183   183  9.704513 0.1924216 7.274921 0.3474233 0.02761610 0.1961744
## 184   184  9.704500 0.1924371 7.274731 0.3469509 0.02769948 0.1960387
## 185   185  9.704641 0.1924108 7.275130 0.3467324 0.02761264 0.1961520
## 186   186  9.704739 0.1924049 7.274526 0.3469983 0.02763660 0.1970158
## 187   187  9.704815 0.1923725 7.274728 0.3451005 0.02725401 0.1961205
## 188   188  9.704796 0.1923605 7.275212 0.3439497 0.02713628 0.1950359
## 189   189  9.704711 0.1923701 7.274616 0.3444465 0.02721944 0.1953944
## 190   190  9.705464 0.1922544 7.275405 0.3432300 0.02711096 0.1944152
## 191   191  9.706538 0.1920932 7.276061 0.3429824 0.02687769 0.1942740
## 192   192  9.707090 0.1920082 7.276367 0.3432024 0.02685902 0.1948152
## 193   193  9.707824 0.1919073 7.276448 0.3432033 0.02686022 0.1944440
## 194   194  9.707287 0.1919870 7.276099 0.3435543 0.02689076 0.1950857
## 195   195  9.707372 0.1919857 7.276107 0.3436176 0.02696579 0.1953178
## 196   196  9.706264 0.1921508 7.275739 0.3432227 0.02704117 0.1946457
## 197   197  9.706917 0.1920720 7.276033 0.3434183 0.02698429 0.1948166
## 198   198  9.706757 0.1920972 7.275784 0.3439086 0.02702790 0.1950741
## 199   199  9.706563 0.1921089 7.275653 0.3434929 0.02692172 0.1951014
## 200   200  9.707453 0.1919860 7.276681 0.3434323 0.02696871 0.1953741
## 201   201  9.706849 0.1920703 7.275752 0.3430747 0.02697143 0.1953223
## 202   202  9.707248 0.1920176 7.276412 0.3426836 0.02694603 0.1949861
## 203   203  9.706883 0.1920770 7.275996 0.3427294 0.02698645 0.1947452
## 204   204  9.706961 0.1920698 7.276315 0.3418034 0.02690694 0.1945011
## 205   205  9.707031 0.1920645 7.276134 0.3422205 0.02687238 0.1948306
## 206   206  9.707212 0.1920317 7.276559 0.3417056 0.02686176 0.1949580
## 207   207  9.707812 0.1919395 7.277206 0.3412297 0.02680738 0.1944318
## 208   208  9.707383 0.1920054 7.276927 0.3421387 0.02687391 0.1951521
## 209   209  9.707098 0.1920555 7.276526 0.3423560 0.02689591 0.1952065
## 210   210  9.706889 0.1920889 7.276527 0.3423302 0.02688841 0.1946550
## 211   211  9.707230 0.1920369 7.276482 0.3420154 0.02689284 0.1944229
## 212   212  9.707232 0.1920256 7.276565 0.3417825 0.02685283 0.1942663
## 213   213  9.707529 0.1919811 7.276811 0.3422276 0.02685810 0.1946583
## 214   214  9.706872 0.1920779 7.276312 0.3421563 0.02687518 0.1946258
## 215   215  9.707125 0.1920436 7.276537 0.3423709 0.02688269 0.1948166
## 216   216  9.707523 0.1919918 7.276899 0.3421291 0.02683683 0.1943649
## 217   217  9.707587 0.1919836 7.276913 0.3417189 0.02684910 0.1941020
## 218   218  9.707312 0.1920244 7.276848 0.3418386 0.02686413 0.1943437
## 219   219  9.707181 0.1920418 7.276845 0.3416553 0.02685859 0.1940363
## 220   220  9.707422 0.1920032 7.277065 0.3416047 0.02686831 0.1939818
## 221   221  9.707501 0.1919944 7.277178 0.3418774 0.02689742 0.1940936
## 222   222  9.707310 0.1920203 7.276914 0.3417660 0.02687880 0.1941040
## 223   223  9.707131 0.1920444 7.276929 0.3416600 0.02687449 0.1941464
## 224   224  9.707377 0.1920082 7.277091 0.3416442 0.02686254 0.1942232
## 225   225  9.707262 0.1920221 7.276725 0.3415707 0.02684678 0.1942614
## 226   226  9.707045 0.1920539 7.276607 0.3416431 0.02685931 0.1943162
## 227   227  9.706838 0.1920822 7.276457 0.3414971 0.02684527 0.1940610
## 228   228  9.706890 0.1920750 7.276668 0.3413004 0.02681223 0.1939648
## 229   229  9.706862 0.1920806 7.276604 0.3413612 0.02683954 0.1941413
## 230   230  9.706910 0.1920736 7.276613 0.3413077 0.02683709 0.1940985
## 231   231  9.706816 0.1920869 7.276530 0.3414668 0.02684573 0.1942758
## 232   232  9.706798 0.1920900 7.276554 0.3414690 0.02683953 0.1942318
## 233   233  9.706781 0.1920922 7.276498 0.3415837 0.02685977 0.1943116
## 234   234  9.706862 0.1920808 7.276539 0.3414934 0.02684693 0.1942315
## 235   235  9.706860 0.1920807 7.276577 0.3414986 0.02684323 0.1942913
## 236   236  9.706824 0.1920873 7.276570 0.3415444 0.02684716 0.1942983
## 237   237  9.706853 0.1920834 7.276587 0.3415864 0.02684751 0.1943318
## 238   238  9.706906 0.1920760 7.276629 0.3415859 0.02684647 0.1943426
## 239   239  9.706889 0.1920785 7.276621 0.3416035 0.02684944 0.1943714
## 240   240  9.706892 0.1920777 7.276628 0.3415893 0.02684758 0.1943579
##   nvmax
## 9     9

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

## (Intercept)          x4          x7          x9         x10         x16 
## 96.70112202 -0.01426636  3.24632946  0.95963994  0.38442187  0.28876623 
##         x17      stat98     stat110    sqrt.x18 
##  0.43713626  1.02673501 -0.96934667  7.48687040

Test

if (isTRUE(algo.forward.caret)) {
  # Evaluate the CV-selected forward model on the held-out test set.
  test.model(model.forward, data.test,
             method = 'leapForward', subopt = NULL,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   109.7   122.1   125.4   125.4   129.1   142.0 
## [1] "leapForward  Test MSE: 93.4589126618511"

Forward Selection with CV (w/ filtered train)

Train

if (isTRUE(algo.forward.caret)) {
  set.seed(1)  # reproducible CV folds
  # Same CV forward selection, now on the filtered training set.
  fit <- train.caret.glmselect(formula = formula,
                               data = data.train2,
                               method = "leapForward",
                               feature.names = feature.names)
  # NOTE(review): this overwrites the full-train `model.forward`/`id` from
  # the previous section — presumably intentional so the shared Test chunk
  # scores the most recent fit; confirm before reordering chunks.
  model.forward <- fit$model
  id <- fit$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 26 on full training set
##     nvmax     RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 8.276677 0.1410643 6.684123 0.2155831 0.02632973 0.1530131
## 2       2 7.964853 0.2051380 6.452767 0.1909273 0.02994904 0.1500269
## 3       3 7.805053 0.2361881 6.289234 0.2298029 0.03151009 0.1643719
## 4       4 7.622695 0.2714980 6.087228 0.2226076 0.03167759 0.1714596
## 5       5 7.529243 0.2894565 6.016453 0.2136827 0.03150015 0.1652114
## 6       6 7.510020 0.2933011 6.013178 0.2178123 0.03393791 0.1754908
## 7       7 7.477940 0.2991413 5.998621 0.1977571 0.03158270 0.1635809
## 8       8 7.457692 0.3027901 5.991982 0.2007623 0.03129776 0.1654502
## 9       9 7.426062 0.3086808 5.971902 0.2009964 0.03261263 0.1703405
## 10     10 7.426107 0.3086253 5.970053 0.2029215 0.03306123 0.1751513
## 11     11 7.414367 0.3108614 5.961918 0.1922128 0.03366771 0.1725217
## 12     12 7.414979 0.3107081 5.958373 0.1988920 0.03250733 0.1713669
## 13     13 7.419061 0.3100373 5.959337 0.2045511 0.03370128 0.1767538
## 14     14 7.423084 0.3094086 5.964853 0.2060659 0.03414807 0.1763194
## 15     15 7.428243 0.3084396 5.967437 0.1992053 0.03443480 0.1704521
## 16     16 7.426650 0.3087507 5.966682 0.2045696 0.03582192 0.1762110
## 17     17 7.422114 0.3095819 5.961941 0.2019137 0.03395284 0.1724250
## 18     18 7.422310 0.3095057 5.963113 0.2035708 0.03389641 0.1727064
## 19     19 7.420272 0.3099223 5.962508 0.2030440 0.03345880 0.1713526
## 20     20 7.417713 0.3103827 5.959190 0.2007986 0.03398335 0.1699050
## 21     21 7.418911 0.3101611 5.962233 0.1954894 0.03328133 0.1692500
## 22     22 7.424423 0.3092196 5.966400 0.1961661 0.03386647 0.1684331
## 23     23 7.415982 0.3108241 5.958998 0.2030614 0.03455290 0.1762039
## 24     24 7.415269 0.3110428 5.963900 0.2051715 0.03392656 0.1797309
## 25     25 7.415506 0.3109968 5.963389 0.1953133 0.03378934 0.1770016
## 26     26 7.412032 0.3117154 5.960122 0.1955252 0.03338915 0.1765938
## 27     27 7.414963 0.3112114 5.965773 0.1977893 0.03361120 0.1795093
## 28     28 7.417700 0.3107358 5.966115 0.1971169 0.03375232 0.1780691
## 29     29 7.419764 0.3102886 5.968073 0.1954080 0.03202335 0.1761267
## 30     30 7.420295 0.3102660 5.968627 0.1968234 0.03277492 0.1799368
## 31     31 7.421289 0.3100751 5.971341 0.1967105 0.03327902 0.1821940
## 32     32 7.420444 0.3102476 5.969246 0.1984524 0.03376098 0.1799657
## 33     33 7.419642 0.3103899 5.968163 0.1940258 0.03304053 0.1775344
## 34     34 7.419467 0.3104557 5.970213 0.1986150 0.03274550 0.1805668
## 35     35 7.421622 0.3101207 5.969628 0.1968586 0.03235189 0.1790285
## 36     36 7.426997 0.3091845 5.974782 0.1960190 0.03243019 0.1792946
## 37     37 7.427208 0.3091668 5.976117 0.1954825 0.03184713 0.1771557
## 38     38 7.428055 0.3090251 5.977875 0.1938243 0.03217522 0.1750109
## 39     39 7.430736 0.3086394 5.981755 0.1949846 0.03309312 0.1742438
## 40     40 7.434463 0.3080472 5.982295 0.1968563 0.03314960 0.1751235
## 41     41 7.436495 0.3077134 5.983669 0.1952357 0.03304963 0.1733293
## 42     42 7.436842 0.3076795 5.985357 0.1947960 0.03319438 0.1756867
## 43     43 7.438201 0.3074924 5.986290 0.1976022 0.03340369 0.1772837
## 44     44 7.437004 0.3077065 5.986566 0.1963547 0.03342272 0.1766653
## 45     45 7.439847 0.3072513 5.986761 0.1946062 0.03359230 0.1726805
## 46     46 7.443452 0.3065900 5.991493 0.1950645 0.03322443 0.1738934
## 47     47 7.442533 0.3066851 5.989343 0.1936907 0.03207352 0.1702337
## 48     48 7.442738 0.3066421 5.989932 0.1919664 0.03158148 0.1715445
## 49     49 7.445685 0.3061271 5.988694 0.1930158 0.03163971 0.1743243
## 50     50 7.454101 0.3046683 5.995760 0.1956961 0.03095714 0.1740195
## 51     51 7.459767 0.3036615 6.001195 0.1947533 0.03011743 0.1723458
## 52     52 7.460244 0.3036013 6.002457 0.1944331 0.02986710 0.1732777
## 53     53 7.462601 0.3032314 6.003751 0.1958066 0.03024561 0.1767057
## 54     54 7.460366 0.3036232 6.001400 0.1987587 0.03064203 0.1807833
## 55     55 7.462090 0.3033732 6.003379 0.1982446 0.03120349 0.1816016
## 56     56 7.462891 0.3032311 6.005508 0.1964937 0.03138101 0.1787874
## 57     57 7.464554 0.3029981 6.005551 0.1947888 0.03155948 0.1749760
## 58     58 7.463472 0.3031897 6.006535 0.1946415 0.03146916 0.1748483
## 59     59 7.465664 0.3028480 6.007088 0.1954431 0.03141797 0.1758175
## 60     60 7.466879 0.3026280 6.008158 0.1953828 0.03149808 0.1766100
## 61     61 7.468635 0.3023536 6.009257 0.1952916 0.03152804 0.1783456
## 62     62 7.468503 0.3024336 6.008960 0.1947198 0.03157755 0.1786967
## 63     63 7.468219 0.3024786 6.010479 0.1933905 0.03121344 0.1771790
## 64     64 7.464739 0.3030557 6.008072 0.1920561 0.03056914 0.1753610
## 65     65 7.464879 0.3030540 6.010302 0.1968102 0.03087013 0.1789470
## 66     66 7.463636 0.3032725 6.009489 0.1954621 0.03102550 0.1780704
## 67     67 7.463269 0.3033580 6.009995 0.1932611 0.03151270 0.1770752
## 68     68 7.462434 0.3035051 6.010403 0.1907401 0.03102803 0.1742429
## 69     69 7.462886 0.3034618 6.008742 0.1906784 0.03104206 0.1725583
## 70     70 7.463123 0.3034011 6.009697 0.1876889 0.03040521 0.1694846
## 71     71 7.460532 0.3038581 6.007723 0.1840179 0.03027023 0.1670100
## 72     72 7.460338 0.3038979 6.006342 0.1818844 0.02998040 0.1658034
## 73     73 7.462140 0.3035976 6.007413 0.1825256 0.02979199 0.1667565
## 74     74 7.462729 0.3035140 6.007921 0.1827996 0.02968673 0.1659665
## 75     75 7.461762 0.3036771 6.005797 0.1795251 0.02971081 0.1617785
## 76     76 7.462023 0.3036600 6.005830 0.1775962 0.02960421 0.1603916
## 77     77 7.461291 0.3037946 6.005821 0.1778838 0.02938571 0.1613505
## 78     78 7.462275 0.3036101 6.007801 0.1756603 0.02911496 0.1606989
## 79     79 7.465156 0.3031050 6.009560 0.1766330 0.02885946 0.1602240
## 80     80 7.461502 0.3037857 6.007485 0.1770801 0.02874460 0.1611189
## 81     81 7.465990 0.3029997 6.011613 0.1773122 0.02889045 0.1614558
## 82     82 7.467410 0.3027374 6.012964 0.1768155 0.02854019 0.1602194
## 83     83 7.469210 0.3023943 6.015719 0.1781919 0.02857724 0.1614429
## 84     84 7.466844 0.3028285 6.011959 0.1769078 0.02877044 0.1633125
## 85     85 7.469427 0.3024149 6.013536 0.1740832 0.02847064 0.1602930
## 86     86 7.471292 0.3021645 6.015591 0.1739071 0.02851695 0.1600308
## 87     87 7.473260 0.3018167 6.017494 0.1732502 0.02859862 0.1596229
## 88     88 7.474822 0.3015472 6.019314 0.1729847 0.02870607 0.1592550
## 89     89 7.477386 0.3011548 6.022314 0.1713568 0.02837468 0.1587140
## 90     90 7.476260 0.3013398 6.019325 0.1706804 0.02820001 0.1591066
## 91     91 7.477408 0.3011651 6.019757 0.1673965 0.02833203 0.1559511
## 92     92 7.480093 0.3007025 6.023472 0.1656299 0.02813638 0.1541687
## 93     93 7.480903 0.3005725 6.023296 0.1638307 0.02800738 0.1523104
## 94     94 7.483120 0.3002193 6.026449 0.1658800 0.02775765 0.1530394
## 95     95 7.482205 0.3004087 6.024333 0.1636920 0.02802862 0.1534048
## 96     96 7.479981 0.3008084 6.022544 0.1645852 0.02860460 0.1534696
## 97     97 7.482969 0.3003413 6.026033 0.1655919 0.02863596 0.1547195
## 98     98 7.481007 0.3006754 6.024760 0.1652053 0.02841300 0.1527544
## 99     99 7.480439 0.3008083 6.025973 0.1663701 0.02827429 0.1535619
## 100   100 7.480128 0.3009157 6.023691 0.1696941 0.02860160 0.1570354
## 101   101 7.482445 0.3005168 6.024369 0.1730997 0.02891404 0.1597132
## 102   102 7.482838 0.3004979 6.025385 0.1745249 0.02897423 0.1599032
## 103   103 7.483100 0.3004913 6.024714 0.1734368 0.02895213 0.1595286
## 104   104 7.481947 0.3006716 6.023139 0.1743696 0.02868186 0.1596263
## 105   105 7.481811 0.3007234 6.024352 0.1742968 0.02878044 0.1598891
## 106   106 7.480959 0.3008652 6.022523 0.1763983 0.02865113 0.1604676
## 107   107 7.482765 0.3005587 6.023054 0.1764851 0.02862564 0.1614598
## 108   108 7.481605 0.3007877 6.022035 0.1778625 0.02868666 0.1625039
## 109   109 7.484069 0.3003726 6.024169 0.1770517 0.02888881 0.1615681
## 110   110 7.485097 0.3001831 6.025433 0.1795220 0.02880488 0.1623114
## 111   111 7.485451 0.3001057 6.025629 0.1772206 0.02856695 0.1607169
## 112   112 7.487026 0.2998624 6.027952 0.1764754 0.02837768 0.1592526
## 113   113 7.488442 0.2996257 6.029309 0.1783646 0.02855585 0.1595032
## 114   114 7.488235 0.2996774 6.028172 0.1785412 0.02894313 0.1579062
## 115   115 7.488713 0.2995844 6.029549 0.1787602 0.02850263 0.1593404
## 116   116 7.487469 0.2998198 6.029268 0.1808205 0.02870299 0.1599970
## 117   117 7.486327 0.3000226 6.028195 0.1808738 0.02889127 0.1616331
## 118   118 7.486276 0.3000086 6.027708 0.1833079 0.02901462 0.1638445
## 119   119 7.487338 0.2998344 6.028258 0.1854640 0.02915732 0.1646994
## 120   120 7.486394 0.3000110 6.027902 0.1824554 0.02893486 0.1618304
## 121   121 7.484009 0.3004507 6.026753 0.1831337 0.02898613 0.1614339
## 122   122 7.482997 0.3006400 6.024580 0.1826649 0.02909015 0.1625242
## 123   123 7.482171 0.3007835 6.023756 0.1800337 0.02893101 0.1605281
## 124   124 7.481071 0.3009875 6.021799 0.1803934 0.02846866 0.1613456
## 125   125 7.482232 0.3008014 6.021634 0.1814670 0.02827463 0.1625570
## 126   126 7.481502 0.3009511 6.020977 0.1815382 0.02822530 0.1630187
## 127   127 7.482014 0.3008617 6.020648 0.1817848 0.02862545 0.1626263
## 128   128 7.483585 0.3006170 6.022292 0.1811430 0.02895477 0.1616023
## 129   129 7.481480 0.3009762 6.020192 0.1797888 0.02907637 0.1605646
## 130   130 7.483460 0.3006289 6.021786 0.1790660 0.02910758 0.1605184
## 131   131 7.483107 0.3006829 6.021711 0.1795321 0.02900241 0.1609353
## 132   132 7.482092 0.3008727 6.019838 0.1787176 0.02902934 0.1606608
## 133   133 7.481158 0.3010486 6.019480 0.1788879 0.02898632 0.1618328
## 134   134 7.479812 0.3012738 6.018615 0.1782041 0.02896998 0.1617606
## 135   135 7.481181 0.3010456 6.019039 0.1789754 0.02917963 0.1622234
## 136   136 7.481527 0.3010060 6.019096 0.1795648 0.02936142 0.1630974
## 137   137 7.483376 0.3006999 6.020470 0.1807584 0.02926638 0.1634680
## 138   138 7.481984 0.3009300 6.019150 0.1810600 0.02934648 0.1636852
## 139   139 7.482675 0.3008436 6.019964 0.1813507 0.02945569 0.1640831
## 140   140 7.482824 0.3008335 6.019491 0.1825552 0.02975588 0.1651027
## 141   141 7.482279 0.3009351 6.020115 0.1823061 0.02965134 0.1650852
## 142   142 7.482474 0.3008888 6.019660 0.1821113 0.02947276 0.1649630
## 143   143 7.481860 0.3010275 6.019193 0.1812359 0.02950676 0.1642490
## 144   144 7.481294 0.3011355 6.017996 0.1814775 0.02952332 0.1633748
## 145   145 7.481548 0.3010744 6.018324 0.1822036 0.02920696 0.1632241
## 146   146 7.481776 0.3010343 6.018476 0.1809717 0.02902333 0.1620700
## 147   147 7.480346 0.3013053 6.017673 0.1793411 0.02915793 0.1609930
## 148   148 7.478763 0.3015912 6.016051 0.1786514 0.02916673 0.1597057
## 149   149 7.479040 0.3015671 6.016221 0.1793812 0.02929533 0.1602682
## 150   150 7.479327 0.3014998 6.017030 0.1794857 0.02922289 0.1609581
## 151   151 7.479041 0.3015441 6.016363 0.1793604 0.02927384 0.1608463
## 152   152 7.477661 0.3017654 6.015109 0.1804717 0.02946290 0.1618995
## 153   153 7.476982 0.3018873 6.013982 0.1802057 0.02932302 0.1616314
## 154   154 7.478503 0.3016241 6.014859 0.1821254 0.02952452 0.1628923
## 155   155 7.478509 0.3016351 6.014659 0.1829291 0.02970804 0.1631974
## 156   156 7.479887 0.3014100 6.017030 0.1825397 0.02957424 0.1630216
## 157   157 7.480425 0.3013579 6.016878 0.1831554 0.02990658 0.1632930
## 158   158 7.481245 0.3012340 6.017155 0.1832634 0.02997280 0.1633764
## 159   159 7.480333 0.3013631 6.017047 0.1845937 0.02991393 0.1651077
## 160   160 7.479888 0.3014615 6.015857 0.1846675 0.02998612 0.1644377
## 161   161 7.481210 0.3012470 6.016619 0.1844208 0.02997239 0.1643941
## 162   162 7.481237 0.3012394 6.016662 0.1843678 0.02983667 0.1637020
## 163   163 7.480427 0.3013810 6.016221 0.1837181 0.02968442 0.1636009
## 164   164 7.480962 0.3013017 6.016395 0.1835794 0.02966088 0.1639899
## 165   165 7.480850 0.3013169 6.016151 0.1840554 0.02965179 0.1633732
## 166   166 7.482862 0.3009751 6.017871 0.1850677 0.02975922 0.1634148
## 167   167 7.482645 0.3010053 6.017798 0.1848717 0.02972356 0.1626175
## 168   168 7.482692 0.3010149 6.018010 0.1845799 0.02987484 0.1626086
## 169   169 7.481354 0.3012427 6.017525 0.1841132 0.03000597 0.1611935
## 170   170 7.480918 0.3013086 6.017401 0.1833140 0.02999435 0.1606101
## 171   171 7.480083 0.3014364 6.015938 0.1828977 0.02988631 0.1603045
## 172   172 7.480094 0.3014498 6.015196 0.1832874 0.02993120 0.1600629
## 173   173 7.480534 0.3013688 6.014622 0.1819616 0.02973117 0.1593836
## 174   174 7.480489 0.3013852 6.015066 0.1821939 0.02967787 0.1598100
## 175   175 7.480764 0.3013381 6.015007 0.1817976 0.02948041 0.1596682
## 176   176 7.481243 0.3012544 6.014628 0.1813447 0.02957074 0.1594899
## 177   177 7.482678 0.3009944 6.015591 0.1811787 0.02957108 0.1598907
## 178   178 7.483913 0.3007792 6.016201 0.1816375 0.02948331 0.1599077
## 179   179 7.485003 0.3005920 6.017159 0.1807966 0.02928428 0.1602579
## 180   180 7.484789 0.3006392 6.016922 0.1817714 0.02945066 0.1612282
## 181   181 7.485704 0.3004858 6.017108 0.1823212 0.02956138 0.1615325
## 182   182 7.485430 0.3005254 6.017235 0.1832522 0.02975996 0.1620184
## 183   183 7.485078 0.3005934 6.017387 0.1838556 0.02975332 0.1627258
## 184   184 7.485577 0.3005169 6.018194 0.1825674 0.02975885 0.1612590
## 185   185 7.484987 0.3006119 6.017830 0.1827391 0.02972269 0.1617993
## 186   186 7.485088 0.3005828 6.018604 0.1826550 0.02980549 0.1617411
## 187   187 7.483924 0.3007856 6.017842 0.1822873 0.02971202 0.1612610
## 188   188 7.483933 0.3007761 6.017861 0.1824287 0.02966992 0.1613999
## 189   189 7.484617 0.3006553 6.017886 0.1831280 0.02960829 0.1616610
## 190   190 7.483764 0.3007925 6.017533 0.1831256 0.02954039 0.1614633
## 191   191 7.483680 0.3008105 6.017623 0.1832266 0.02939642 0.1619034
## 192   192 7.483806 0.3007899 6.017761 0.1820739 0.02934744 0.1608026
## 193   193 7.483795 0.3007980 6.018174 0.1815386 0.02932701 0.1603644
## 194   194 7.483603 0.3008373 6.018790 0.1815266 0.02919488 0.1609116
## 195   195 7.483214 0.3009007 6.018130 0.1815288 0.02916087 0.1607377
## 196   196 7.483225 0.3009026 6.018451 0.1812055 0.02912382 0.1605011
## 197   197 7.483340 0.3008832 6.018365 0.1812692 0.02911356 0.1610353
## 198   198 7.482784 0.3009791 6.018000 0.1819880 0.02911756 0.1611758
## 199   199 7.483268 0.3008966 6.018335 0.1813773 0.02902510 0.1609913
## 200   200 7.483485 0.3008531 6.018738 0.1811158 0.02894172 0.1610211
## 201   201 7.483710 0.3008244 6.019235 0.1812392 0.02899370 0.1610128
## 202   202 7.483452 0.3008719 6.018984 0.1815288 0.02902675 0.1608282
## 203   203 7.483470 0.3008647 6.019293 0.1814845 0.02895399 0.1610469
## 204   204 7.483601 0.3008427 6.019563 0.1815816 0.02890602 0.1615091
## 205   205 7.483740 0.3008149 6.019724 0.1818313 0.02899339 0.1616071
## 206   206 7.483539 0.3008555 6.019328 0.1821243 0.02892088 0.1616466
## 207   207 7.483902 0.3007911 6.019534 0.1822014 0.02901191 0.1614660
## 208   208 7.484087 0.3007581 6.019569 0.1823579 0.02907633 0.1616644
## 209   209 7.483420 0.3008746 6.018831 0.1826222 0.02913582 0.1618397
## 210   210 7.483889 0.3007967 6.019470 0.1827671 0.02917941 0.1619161
## 211   211 7.484136 0.3007569 6.019802 0.1823439 0.02908974 0.1615877
## 212   212 7.484250 0.3007415 6.020120 0.1825087 0.02913662 0.1617496
## 213   213 7.484230 0.3007430 6.020073 0.1821554 0.02907069 0.1615358
## 214   214 7.484500 0.3006966 6.020422 0.1824414 0.02909072 0.1615260
## 215   215 7.484778 0.3006531 6.020474 0.1824007 0.02899335 0.1615344
## 216   216 7.485185 0.3005861 6.020988 0.1819639 0.02898192 0.1613239
## 217   217 7.485561 0.3005281 6.021225 0.1819005 0.02904631 0.1612285
## 218   218 7.485676 0.3005104 6.021167 0.1817411 0.02897691 0.1611274
## 219   219 7.485282 0.3005727 6.020867 0.1816656 0.02894700 0.1613425
## 220   220 7.485534 0.3005338 6.021178 0.1818179 0.02895913 0.1613285
## 221   221 7.485310 0.3005733 6.021241 0.1816350 0.02895481 0.1611551
## 222   222 7.485330 0.3005696 6.021271 0.1815238 0.02892491 0.1613296
## 223   223 7.485533 0.3005342 6.021232 0.1815954 0.02897600 0.1614133
## 224   224 7.485393 0.3005600 6.021279 0.1816679 0.02897485 0.1615359
## 225   225 7.485507 0.3005400 6.021474 0.1814428 0.02895719 0.1614197
## 226   226 7.485774 0.3004966 6.021803 0.1813580 0.02901401 0.1612259
## 227   227 7.485958 0.3004613 6.022085 0.1812933 0.02899000 0.1609602
## 228   228 7.485903 0.3004686 6.022126 0.1813543 0.02896481 0.1611853
## 229   229 7.485865 0.3004746 6.022094 0.1813131 0.02895796 0.1611545
## 230   230 7.485790 0.3004865 6.022059 0.1810982 0.02895731 0.1609416
## 231   231 7.485832 0.3004797 6.021987 0.1811091 0.02894470 0.1608509
## 232   232 7.485895 0.3004679 6.022014 0.1811359 0.02891979 0.1608564
## 233   233 7.485961 0.3004561 6.022020 0.1813644 0.02891960 0.1609937
## 234   234 7.486026 0.3004439 6.022105 0.1813039 0.02891242 0.1609771
## 235   235 7.486028 0.3004447 6.022125 0.1813252 0.02891942 0.1610493
## 236   236 7.486036 0.3004439 6.022118 0.1812701 0.02891840 0.1610519
## 237   237 7.486065 0.3004386 6.022129 0.1812642 0.02891534 0.1610839
## 238   238 7.486033 0.3004442 6.022107 0.1812514 0.02891806 0.1610742
## 239   239 7.486022 0.3004460 6.022095 0.1812484 0.02891771 0.1610637
## 240   240 7.486026 0.3004453 6.022099 0.1812374 0.02891745 0.1610546
##    nvmax
## 26    26

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  8.779440e+01 -1.557120e-02  3.403876e+00  1.539538e-01  9.284070e-01 
##           x10           x11           x16           x17           x21 
##  4.688309e-01  5.283539e+07  2.962353e-01  4.203017e-01  2.574440e-02 
##         stat4         stat6        stat14        stat23        stat24 
## -1.596461e-01 -1.395450e-01 -2.564100e-01  1.449456e-01 -1.415226e-01 
##        stat33        stat35        stat38        stat41        stat98 
## -1.700816e-01 -1.421244e-01  1.890886e-01 -1.606934e-01  9.455994e-01 
##       stat100       stat110       stat144       stat149       stat156 
##  1.878400e-01 -8.985658e-01  1.437621e-01 -1.939056e-01  2.086850e-01 
##       stat172      sqrt.x18 
##  1.666971e-01  7.327670e+00

Test

# Forward Selection (caret): evaluate the CV-tuned leapForward model on the
# hold-out test set. isTRUE() is safer than `== TRUE` — it is FALSE (not NA or
# an error) when the flag is NA or not a length-1 logical.
if (isTRUE(algo.forward.caret)) {
  test.model(model.forward, data.test,
             method = "leapForward", subopt = NULL,
             formula = formula,
             feature.names = feature.names, label.names = label.names,
             id = id,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   106.4   120.8   124.2   124.2   127.9   138.9 
## [1] "leapForward  Test MSE: 94.1712443109364"

Backward Elimination

Train

# Backward Elimination (stepwise AIC) on the full linear model.
# Opt-in via `algo.backward` because step() is slow with many predictors.
if (isTRUE(algo.backward)) {
  t1 <- Sys.time()

  # direction = "backward": start from model.full and drop terms; trace = 0
  # suppresses the per-step console output.
  model.backward <- step(model.full, data = data.train,
                         direction = "backward", trace = 0)
  print(summary(model.backward))

  t2 <- Sys.time()
  # format(difftime(...)) keeps the time units in the message; the original
  # `t2 - t1` was coerced to a bare number inside paste(), losing the units.
  print(paste0("Time taken for Backward Elimination: ",
               format(difftime(t2, t1))))

  plot.diagnostics(model.backward, data.train)
}

Test

# Evaluate the backward-elimination model on the hold-out test set.
# BUG FIX: the original referenced `model.backard` (typo), which would raise
# "object not found" at runtime; the fitted object is `model.backward`.
if (isTRUE(algo.backward)) {
  test.model(model.backward, data.test, "Backward Elimination")
}

Backward Elimination with CV (w/ full train)

Train

# Backward Elimination with cross-validation (caret leapBackward), trained on
# the FULL training set. train.caret.glmselect returns the fitted caret model
# plus an id used later by test.model.
if (isTRUE(algo.backward.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train,
                                    method = "leapBackward",
                                    feature.names = feature.names)
  model.backward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 9 on full training set
##     nvmax      RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 10.210693 0.1018765 7.806589 0.3486064 0.02595312 0.1962709
## 2       2  9.979515 0.1418363 7.597398 0.3207096 0.02875352 0.1777050
## 3       3  9.814897 0.1693658 7.438338 0.3290643 0.02785753 0.1807549
## 4       4  9.667493 0.1938902 7.231990 0.3201540 0.02682717 0.1803033
## 5       5  9.588875 0.2068466 7.172167 0.3419604 0.02484619 0.1975346
## 6       6  9.591660 0.2066208 7.174795 0.3521685 0.02739708 0.2020679
## 7       7  9.571737 0.2099116 7.168707 0.3525200 0.02833739 0.2076171
## 8       8  9.546348 0.2140867 7.150908 0.3571826 0.02810121 0.2058362
## 9       9  9.521497 0.2180754 7.135342 0.3515932 0.02753943 0.1919503
## 10     10  9.531634 0.2164981 7.142145 0.3502059 0.02798899 0.1947027
## 11     11  9.531499 0.2165176 7.142129 0.3493156 0.02735269 0.1916070
## 12     12  9.541275 0.2149978 7.148145 0.3511383 0.02743081 0.1879036
## 13     13  9.539867 0.2151993 7.145155 0.3456114 0.02644858 0.1873780
## 14     14  9.532611 0.2163848 7.137358 0.3470188 0.02761253 0.1915646
## 15     15  9.528434 0.2170681 7.130122 0.3501793 0.02790495 0.1974392
## 16     16  9.533248 0.2163267 7.130545 0.3528825 0.02850968 0.1979491
## 17     17  9.534851 0.2160552 7.129357 0.3505236 0.02748267 0.1945657
## 18     18  9.536197 0.2158903 7.134786 0.3468297 0.02764942 0.1942370
## 19     19  9.536516 0.2158327 7.137370 0.3465569 0.02819126 0.1926959
## 20     20  9.537824 0.2156687 7.135493 0.3478763 0.02871909 0.1898675
## 21     21  9.536678 0.2159513 7.131799 0.3583694 0.02959617 0.1937490
## 22     22  9.541966 0.2150980 7.135841 0.3539049 0.02907704 0.1940634
## 23     23  9.545257 0.2145512 7.138423 0.3525134 0.02890386 0.1901105
## 24     24  9.552337 0.2134013 7.144349 0.3532654 0.02854211 0.1900327
## 25     25  9.559340 0.2124073 7.153893 0.3595815 0.02969110 0.1953156
## 26     26  9.564942 0.2114551 7.160867 0.3533027 0.02865346 0.1898825
## 27     27  9.567153 0.2111656 7.162236 0.3551718 0.02879216 0.1906402
## 28     28  9.578157 0.2094785 7.165991 0.3565853 0.02874149 0.1899061
## 29     29  9.585088 0.2084604 7.168349 0.3602446 0.02912832 0.1918108
## 30     30  9.591335 0.2075436 7.171271 0.3663210 0.02935303 0.1971488
## 31     31  9.598155 0.2065258 7.178620 0.3631845 0.02895114 0.1969082
## 32     32  9.601486 0.2060684 7.183593 0.3675440 0.02949985 0.2051137
## 33     33  9.604653 0.2055913 7.185761 0.3634878 0.02906543 0.2051718
## 34     34  9.609242 0.2049042 7.187453 0.3652655 0.02890848 0.2104810
## 35     35  9.611277 0.2045781 7.190543 0.3586124 0.02837577 0.2091363
## 36     36  9.617242 0.2037266 7.197623 0.3591314 0.02839541 0.2085561
## 37     37  9.618621 0.2035180 7.199299 0.3572157 0.02780645 0.2103565
## 38     38  9.623021 0.2028526 7.199195 0.3576192 0.02780702 0.2100995
## 39     39  9.627222 0.2022288 7.202209 0.3583861 0.02814230 0.2132511
## 40     40  9.628373 0.2020733 7.205311 0.3583978 0.02777717 0.2127988
## 41     41  9.633754 0.2012107 7.211548 0.3573159 0.02727663 0.2123332
## 42     42  9.637859 0.2005687 7.215515 0.3590490 0.02730294 0.2129728
## 43     43  9.643917 0.1996767 7.222107 0.3574981 0.02702196 0.2098385
## 44     44  9.647104 0.1992357 7.223696 0.3604460 0.02746196 0.2119928
## 45     45  9.646832 0.1993935 7.224951 0.3618233 0.02810861 0.2159876
## 46     46  9.650030 0.1988921 7.225393 0.3591478 0.02801080 0.2156596
## 47     47  9.650792 0.1988018 7.224751 0.3558083 0.02807442 0.2112062
## 48     48  9.652924 0.1985198 7.227251 0.3568715 0.02854845 0.2141574
## 49     49  9.658965 0.1976209 7.230449 0.3559228 0.02817482 0.2151760
## 50     50  9.663520 0.1969755 7.236301 0.3582745 0.02841885 0.2188495
## 51     51  9.665446 0.1966864 7.238810 0.3582130 0.02798934 0.2160960
## 52     52  9.667487 0.1963912 7.239920 0.3564635 0.02800182 0.2119510
## 53     53  9.668056 0.1963089 7.241985 0.3598707 0.02801428 0.2153900
## 54     54  9.670989 0.1959324 7.245157 0.3591777 0.02773111 0.2130229
## 55     55  9.675101 0.1953436 7.249609 0.3577183 0.02739725 0.2110367
## 56     56  9.676830 0.1951037 7.250394 0.3623240 0.02744127 0.2139670
## 57     57  9.679797 0.1946754 7.249255 0.3615492 0.02742856 0.2143840
## 58     58  9.679046 0.1947743 7.251771 0.3603634 0.02734686 0.2122990
## 59     59  9.676757 0.1951176 7.251144 0.3616330 0.02751501 0.2163119
## 60     60  9.677988 0.1949203 7.248823 0.3591144 0.02738875 0.2136221
## 61     61  9.682974 0.1941887 7.255781 0.3568176 0.02741414 0.2108565
## 62     62  9.681945 0.1943356 7.256227 0.3553458 0.02745293 0.2088039
## 63     63  9.684731 0.1939745 7.256336 0.3563263 0.02747387 0.2084639
## 64     64  9.686669 0.1937188 7.256965 0.3567159 0.02762404 0.2105528
## 65     65  9.690756 0.1931640 7.259871 0.3593361 0.02760911 0.2143642
## 66     66  9.693208 0.1928502 7.263137 0.3569962 0.02749936 0.2144834
## 67     67  9.691543 0.1931161 7.260283 0.3565026 0.02721116 0.2143375
## 68     68  9.690978 0.1932330 7.259098 0.3566041 0.02728468 0.2118798
## 69     69  9.688821 0.1935501 7.257127 0.3576551 0.02745005 0.2115901
## 70     70  9.684935 0.1941809 7.253396 0.3592030 0.02775692 0.2102125
## 71     71  9.687518 0.1937960 7.256291 0.3539772 0.02713243 0.2037136
## 72     72  9.687527 0.1937583 7.255942 0.3527590 0.02663387 0.2048789
## 73     73  9.686302 0.1939830 7.257381 0.3523154 0.02660123 0.2024226
## 74     74  9.685768 0.1940980 7.254324 0.3498765 0.02653866 0.2015641
## 75     75  9.688213 0.1937263 7.255980 0.3471208 0.02657223 0.1974896
## 76     76  9.687497 0.1938721 7.255461 0.3487272 0.02669801 0.1979915
## 77     77  9.690580 0.1934465 7.257197 0.3469125 0.02691485 0.1960903
## 78     78  9.691207 0.1933821 7.257562 0.3473227 0.02669748 0.1964870
## 79     79  9.691675 0.1933438 7.258956 0.3472902 0.02641815 0.1968501
## 80     80  9.693854 0.1930749 7.261112 0.3508407 0.02704887 0.1976880
## 81     81  9.692890 0.1932319 7.260472 0.3524775 0.02739940 0.1985055
## 82     82  9.692245 0.1933068 7.261453 0.3511637 0.02684694 0.1992143
## 83     83  9.693466 0.1931490 7.262818 0.3504703 0.02650340 0.2003574
## 84     84  9.694028 0.1931134 7.263768 0.3507373 0.02677541 0.2009263
## 85     85  9.694468 0.1930435 7.264639 0.3495535 0.02668432 0.2007437
## 86     86  9.694891 0.1930021 7.264524 0.3488007 0.02657575 0.1999154
## 87     87  9.693891 0.1931946 7.262981 0.3539819 0.02683954 0.2072625
## 88     88  9.695446 0.1929828 7.265615 0.3535711 0.02666659 0.2058616
## 89     89  9.693228 0.1933061 7.264339 0.3500042 0.02649108 0.2019296
## 90     90  9.694645 0.1930854 7.265619 0.3484994 0.02670515 0.2009905
## 91     91  9.693245 0.1933070 7.265675 0.3511601 0.02724310 0.2028728
## 92     92  9.694332 0.1931339 7.267006 0.3483889 0.02696075 0.2022797
## 93     93  9.695279 0.1930057 7.268581 0.3490137 0.02692229 0.2021526
## 94     94  9.694399 0.1931330 7.268371 0.3509579 0.02687495 0.2017675
## 95     95  9.696027 0.1928964 7.267307 0.3503583 0.02674838 0.2022347
## 96     96  9.695892 0.1928995 7.267799 0.3466564 0.02677999 0.2001109
## 97     97  9.694797 0.1931080 7.265956 0.3466219 0.02693330 0.1994421
## 98     98  9.692698 0.1934263 7.264149 0.3479106 0.02733316 0.2008395
## 99     99  9.693191 0.1933653 7.266731 0.3459556 0.02708994 0.2000581
## 100   100  9.694230 0.1931909 7.267642 0.3453334 0.02695419 0.2006398
## 101   101  9.694665 0.1931135 7.269663 0.3463024 0.02671130 0.2018744
## 102   102  9.696332 0.1928894 7.270051 0.3463249 0.02700542 0.2009760
## 103   103  9.693765 0.1932943 7.267977 0.3479165 0.02699315 0.2023682
## 104   104  9.693350 0.1933374 7.267306 0.3461054 0.02681763 0.2001660
## 105   105  9.694099 0.1932599 7.265960 0.3464355 0.02679231 0.2003628
## 106   106  9.694248 0.1932680 7.264014 0.3495186 0.02704639 0.2035439
## 107   107  9.696082 0.1930190 7.263139 0.3487028 0.02680455 0.2043117
## 108   108  9.697494 0.1928198 7.264114 0.3472730 0.02663206 0.2036389
## 109   109  9.697918 0.1928037 7.264428 0.3465157 0.02637458 0.2035872
## 110   110  9.697535 0.1928892 7.263764 0.3465883 0.02648241 0.2018739
## 111   111  9.699857 0.1925462 7.265406 0.3444297 0.02631810 0.2001005
## 112   112  9.700671 0.1924654 7.266938 0.3425239 0.02628710 0.1999002
## 113   113  9.699430 0.1926423 7.267804 0.3407401 0.02613954 0.2001406
## 114   114  9.700733 0.1924807 7.268004 0.3424886 0.02614898 0.2015903
## 115   115  9.701195 0.1924579 7.268317 0.3418141 0.02638172 0.2002767
## 116   116  9.701859 0.1923904 7.267324 0.3421685 0.02631705 0.1999053
## 117   117  9.703508 0.1921210 7.268201 0.3392631 0.02624918 0.1967074
## 118   118  9.702222 0.1922814 7.266561 0.3379846 0.02607425 0.1955241
## 119   119  9.701533 0.1923890 7.266576 0.3383208 0.02610860 0.1979635
## 120   120  9.699171 0.1927467 7.267153 0.3383421 0.02625200 0.1986509
## 121   121  9.700917 0.1925431 7.267551 0.3398877 0.02635284 0.1980885
## 122   122  9.699888 0.1926985 7.267375 0.3390506 0.02639720 0.1973481
## 123   123  9.699886 0.1927058 7.267415 0.3388495 0.02651560 0.1963265
## 124   124  9.700670 0.1925790 7.268024 0.3375058 0.02619287 0.1936089
## 125   125  9.700155 0.1926996 7.268723 0.3380325 0.02606751 0.1940960
## 126   126  9.699130 0.1928734 7.268690 0.3374778 0.02625507 0.1936276
## 127   127  9.700602 0.1926540 7.270243 0.3362610 0.02614090 0.1931906
## 128   128  9.701390 0.1925252 7.271134 0.3360868 0.02625250 0.1925629
## 129   129  9.701509 0.1925126 7.271675 0.3363617 0.02629810 0.1926265
## 130   130  9.701786 0.1925010 7.271268 0.3365147 0.02632620 0.1929906
## 131   131  9.700856 0.1926622 7.271408 0.3370343 0.02648114 0.1942717
## 132   132  9.701056 0.1926271 7.271316 0.3365443 0.02617507 0.1926948
## 133   133  9.701189 0.1926198 7.270559 0.3383092 0.02625815 0.1946690
## 134   134  9.699918 0.1928212 7.270029 0.3389727 0.02628794 0.1947150
## 135   135  9.700284 0.1927719 7.271585 0.3387967 0.02622649 0.1959216
## 136   136  9.700587 0.1927551 7.271776 0.3382973 0.02627889 0.1955920
## 137   137  9.700030 0.1928449 7.271412 0.3391261 0.02664764 0.1956274
## 138   138  9.701284 0.1926953 7.272473 0.3398064 0.02659261 0.1967699
## 139   139  9.702943 0.1924654 7.273342 0.3383317 0.02652047 0.1949653
## 140   140  9.702719 0.1924891 7.273287 0.3383299 0.02671578 0.1942982
## 141   141  9.701634 0.1926481 7.272199 0.3363367 0.02656415 0.1925802
## 142   142  9.702920 0.1924607 7.273210 0.3357138 0.02644660 0.1913421
## 143   143  9.704954 0.1921832 7.275617 0.3357353 0.02659447 0.1905552
## 144   144  9.704304 0.1922734 7.275483 0.3359528 0.02653808 0.1893606
## 145   145  9.704598 0.1922769 7.276437 0.3357202 0.02671569 0.1887101
## 146   146  9.706156 0.1920346 7.277584 0.3367335 0.02683361 0.1901965
## 147   147  9.705219 0.1921676 7.276732 0.3368533 0.02676368 0.1901619
## 148   148  9.706015 0.1920676 7.277644 0.3359080 0.02682262 0.1905669
## 149   149  9.707198 0.1919343 7.278786 0.3376662 0.02713818 0.1912218
## 150   150  9.706231 0.1920750 7.277242 0.3367319 0.02705782 0.1891128
## 151   151  9.706870 0.1919910 7.277071 0.3382122 0.02706506 0.1892865
## 152   152  9.705992 0.1921238 7.276278 0.3383902 0.02701045 0.1900168
## 153   153  9.704477 0.1923266 7.275587 0.3382009 0.02706117 0.1907077
## 154   154  9.704781 0.1922846 7.276010 0.3380793 0.02696273 0.1904201
## 155   155  9.704316 0.1923475 7.275390 0.3380834 0.02686155 0.1910122
## 156   156  9.703228 0.1925016 7.273727 0.3383367 0.02675757 0.1896602
## 157   157  9.703627 0.1924222 7.273189 0.3382846 0.02674206 0.1902202
## 158   158  9.703481 0.1924357 7.273005 0.3390063 0.02666209 0.1908757
## 159   159  9.704909 0.1922154 7.274037 0.3393491 0.02674079 0.1902913
## 160   160  9.703127 0.1924782 7.271705 0.3401949 0.02673237 0.1908518
## 161   161  9.702727 0.1925302 7.271220 0.3386306 0.02661123 0.1891912
## 162   162  9.702753 0.1925473 7.271521 0.3395543 0.02675071 0.1903630
## 163   163  9.701532 0.1927338 7.270820 0.3410812 0.02700245 0.1905563
## 164   164  9.701696 0.1927149 7.271155 0.3414445 0.02685013 0.1909318
## 165   165  9.702190 0.1926441 7.272242 0.3417180 0.02668801 0.1908188
## 166   166  9.702391 0.1926209 7.272864 0.3412582 0.02663495 0.1899455
## 167   167  9.702464 0.1926121 7.272514 0.3411960 0.02674623 0.1892967
## 168   168  9.702612 0.1926056 7.272601 0.3421849 0.02706887 0.1892675
## 169   169  9.702930 0.1925533 7.272853 0.3420169 0.02695621 0.1891021
## 170   170  9.702514 0.1926427 7.272702 0.3442184 0.02721634 0.1924847
## 171   171  9.702272 0.1926780 7.272644 0.3431826 0.02721253 0.1921730
## 172   172  9.701911 0.1927534 7.272337 0.3444367 0.02733291 0.1932607
## 173   173  9.701632 0.1927780 7.272172 0.3444195 0.02730727 0.1940425
## 174   174  9.700646 0.1929184 7.271452 0.3432577 0.02732081 0.1930826
## 175   175  9.700944 0.1928960 7.271902 0.3432280 0.02742227 0.1932193
## 176   176  9.701463 0.1928216 7.271954 0.3426500 0.02725610 0.1925960
## 177   177  9.702926 0.1926085 7.272968 0.3432448 0.02711976 0.1933876
## 178   178  9.703376 0.1925637 7.273522 0.3456493 0.02738826 0.1955659
## 179   179  9.703464 0.1925618 7.273980 0.3462369 0.02728684 0.1952728
## 180   180  9.703607 0.1925353 7.274232 0.3461024 0.02721610 0.1960882
## 181   181  9.703679 0.1925261 7.274611 0.3460015 0.02729047 0.1962450
## 182   182  9.704920 0.1923432 7.275517 0.3465744 0.02750705 0.1967721
## 183   183  9.704172 0.1924545 7.275013 0.3464381 0.02752247 0.1955989
## 184   184  9.704787 0.1923746 7.275449 0.3458979 0.02752517 0.1953603
## 185   185  9.705034 0.1923464 7.275480 0.3460214 0.02747340 0.1950640
## 186   186  9.705830 0.1922384 7.275870 0.3455084 0.02736277 0.1955410
## 187   187  9.704883 0.1923773 7.275090 0.3450902 0.02726262 0.1957395
## 188   188  9.705612 0.1922647 7.275936 0.3448850 0.02722264 0.1954333
## 189   189  9.705123 0.1923216 7.274658 0.3443437 0.02721938 0.1955785
## 190   190  9.706257 0.1921484 7.275502 0.3438829 0.02718141 0.1947806
## 191   191  9.706458 0.1921066 7.275537 0.3430451 0.02688195 0.1948499
## 192   192  9.706955 0.1920259 7.276070 0.3432975 0.02686359 0.1951445
## 193   193  9.707689 0.1919251 7.276148 0.3432987 0.02686485 0.1947790
## 194   194  9.707287 0.1919870 7.276099 0.3435543 0.02689076 0.1950857
## 195   195  9.707731 0.1919316 7.276503 0.3430502 0.02685739 0.1948084
## 196   196  9.706798 0.1920676 7.276143 0.3427109 0.02693133 0.1941550
## 197   197  9.706856 0.1920773 7.275998 0.3433947 0.02698426 0.1947861
## 198   198  9.706930 0.1920668 7.275930 0.3439761 0.02702817 0.1951985
## 199   199  9.706706 0.1920892 7.276041 0.3433670 0.02691884 0.1943902
## 200   200  9.706955 0.1920651 7.276634 0.3436436 0.02698959 0.1950841
## 201   201  9.706813 0.1920812 7.276180 0.3431026 0.02697407 0.1948717
## 202   202  9.707248 0.1920176 7.276412 0.3426836 0.02694603 0.1949861
## 203   203  9.706840 0.1920819 7.276036 0.3427547 0.02698682 0.1947028
## 204   204  9.706945 0.1920697 7.276208 0.3418236 0.02690521 0.1944195
## 205   205  9.707119 0.1920516 7.276126 0.3422049 0.02686679 0.1948274
## 206   206  9.707184 0.1920361 7.276479 0.3417104 0.02686366 0.1949266
## 207   207  9.707510 0.1919813 7.277028 0.3414082 0.02681033 0.1946199
## 208   208  9.707079 0.1920474 7.276749 0.3423171 0.02687667 0.1953371
## 209   209  9.707098 0.1920555 7.276526 0.3423560 0.02689591 0.1952065
## 210   210  9.706889 0.1920889 7.276527 0.3423302 0.02688841 0.1946550
## 211   211  9.707230 0.1920369 7.276482 0.3420154 0.02689284 0.1944229
## 212   212  9.707232 0.1920256 7.276565 0.3417825 0.02685283 0.1942663
## 213   213  9.707529 0.1919811 7.276811 0.3422276 0.02685810 0.1946583
## 214   214  9.706872 0.1920779 7.276312 0.3421563 0.02687518 0.1946258
## 215   215  9.707125 0.1920436 7.276537 0.3423709 0.02688269 0.1948166
## 216   216  9.707523 0.1919918 7.276899 0.3421291 0.02683683 0.1943649
## 217   217  9.707587 0.1919836 7.276913 0.3417189 0.02684910 0.1941020
## 218   218  9.707312 0.1920244 7.276848 0.3418386 0.02686413 0.1943437
## 219   219  9.707181 0.1920418 7.276845 0.3416553 0.02685859 0.1940363
## 220   220  9.707422 0.1920032 7.277065 0.3416047 0.02686831 0.1939818
## 221   221  9.707501 0.1919944 7.277178 0.3418774 0.02689742 0.1940936
## 222   222  9.707310 0.1920203 7.276914 0.3417660 0.02687880 0.1941040
## 223   223  9.707131 0.1920444 7.276929 0.3416600 0.02687449 0.1941464
## 224   224  9.707377 0.1920082 7.277091 0.3416442 0.02686254 0.1942232
## 225   225  9.707262 0.1920221 7.276725 0.3415707 0.02684678 0.1942614
## 226   226  9.707045 0.1920539 7.276607 0.3416431 0.02685931 0.1943162
## 227   227  9.706838 0.1920822 7.276457 0.3414971 0.02684527 0.1940610
## 228   228  9.706890 0.1920750 7.276668 0.3413004 0.02681223 0.1939648
## 229   229  9.706862 0.1920806 7.276604 0.3413612 0.02683954 0.1941413
## 230   230  9.706910 0.1920736 7.276613 0.3413077 0.02683709 0.1940985
## 231   231  9.706816 0.1920869 7.276530 0.3414668 0.02684573 0.1942758
## 232   232  9.706798 0.1920900 7.276554 0.3414690 0.02683953 0.1942318
## 233   233  9.706781 0.1920922 7.276498 0.3415837 0.02685977 0.1943116
## 234   234  9.706862 0.1920808 7.276539 0.3414934 0.02684693 0.1942315
## 235   235  9.706860 0.1920807 7.276577 0.3414986 0.02684323 0.1942913
## 236   236  9.706824 0.1920873 7.276570 0.3415444 0.02684716 0.1942983
## 237   237  9.706853 0.1920834 7.276587 0.3415864 0.02684751 0.1943318
## 238   238  9.706906 0.1920760 7.276629 0.3415859 0.02684647 0.1943426
## 239   239  9.706889 0.1920785 7.276621 0.3416035 0.02684944 0.1943714
## 240   240  9.706892 0.1920777 7.276628 0.3415893 0.02684758 0.1943579
##   nvmax
## 9     9

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

## (Intercept)          x4          x7          x9         x10         x16 
## 96.70112202 -0.01426636  3.24632946  0.95963994  0.38442187  0.28876623 
##         x17      stat98     stat110    sqrt.x18 
##  0.43713626  1.02673501 -0.96934667  7.48687040

Test

# Backward Elimination (caret): evaluate the CV-tuned leapBackward model on
# the hold-out test set. isTRUE() guards against an NA/non-logical flag,
# unlike the brittle `== TRUE` comparison.
if (isTRUE(algo.backward.caret)) {
  test.model(model.backward, data.test,
             method = "leapBackward", subopt = NULL,
             formula = formula,
             feature.names = feature.names, label.names = label.names,
             id = id,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   109.7   122.1   125.4   125.4   129.1   142.0 
## [1] "leapBackward  Test MSE: 93.4589126618511"

Backward Elimination with CV (w/ filtered train)

Train

# Backward Elimination with cross-validation (caret leapBackward), trained on
# the FILTERED training set (data.train2). Overwrites model.backward/id from
# the full-train fit above, so the subsequent test chunk scores this model.
if (isTRUE(algo.backward.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train2,
                                    method = "leapBackward",
                                    feature.names = feature.names)
  model.backward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 26 on full training set
##     nvmax     RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 8.276677 0.1410643 6.684123 0.2155831 0.02632973 0.1530131
## 2       2 7.964853 0.2051380 6.452767 0.1909273 0.02994904 0.1500269
## 3       3 7.805053 0.2361881 6.289234 0.2298029 0.03151009 0.1643719
## 4       4 7.622695 0.2714980 6.087228 0.2226076 0.03167759 0.1714596
## 5       5 7.529243 0.2894565 6.016453 0.2136827 0.03150015 0.1652114
## 6       6 7.510020 0.2933011 6.013178 0.2178123 0.03393791 0.1754908
## 7       7 7.477940 0.2991413 5.998621 0.1977571 0.03158270 0.1635809
## 8       8 7.457692 0.3027901 5.991982 0.2007623 0.03129776 0.1654502
## 9       9 7.426062 0.3086808 5.971902 0.2009964 0.03261263 0.1703405
## 10     10 7.426107 0.3086253 5.970053 0.2029215 0.03306123 0.1751513
## 11     11 7.414367 0.3108614 5.961918 0.1922128 0.03366771 0.1725217
## 12     12 7.414979 0.3107081 5.958373 0.1988920 0.03250733 0.1713669
## 13     13 7.419061 0.3100373 5.959337 0.2045511 0.03370128 0.1767538
## 14     14 7.423021 0.3093897 5.964115 0.2061276 0.03416069 0.1770958
## 15     15 7.426975 0.3086460 5.964136 0.2005644 0.03431245 0.1742788
## 16     16 7.423712 0.3092999 5.963404 0.2059067 0.03562611 0.1774693
## 17     17 7.421415 0.3097418 5.961632 0.2009623 0.03462736 0.1699046
## 18     18 7.425364 0.3089498 5.964703 0.1965068 0.03381535 0.1668292
## 19     19 7.426429 0.3087858 5.966779 0.1942314 0.03253504 0.1641963
## 20     20 7.420735 0.3097839 5.962694 0.1904969 0.03268285 0.1655002
## 21     21 7.417451 0.3104281 5.958636 0.1940692 0.03319040 0.1685503
## 22     22 7.417843 0.3104054 5.958917 0.1996032 0.03316233 0.1734208
## 23     23 7.411809 0.3116230 5.954806 0.2066194 0.03419916 0.1807268
## 24     24 7.416370 0.3108508 5.962561 0.2039085 0.03401727 0.1814145
## 25     25 7.417251 0.3106764 5.964876 0.1962715 0.03330188 0.1776935
## 26     26 7.411350 0.3118360 5.960283 0.1951685 0.03357305 0.1755140
## 27     27 7.414032 0.3113712 5.964218 0.1988576 0.03352832 0.1813936
## 28     28 7.417734 0.3107293 5.965755 0.1970509 0.03374472 0.1787530
## 29     29 7.419768 0.3102819 5.967800 0.1954006 0.03201553 0.1766429
## 30     30 7.419563 0.3103889 5.966026 0.1990140 0.03271711 0.1793984
## 31     31 7.422214 0.3099124 5.971182 0.2002072 0.03348862 0.1828807
## 32     32 7.420444 0.3102476 5.969246 0.1984524 0.03376098 0.1799657
## 33     33 7.419446 0.3104476 5.967172 0.1952250 0.03308624 0.1773729
## 34     34 7.420581 0.3102763 5.967490 0.1991398 0.03301749 0.1813087
## 35     35 7.422748 0.3099213 5.970411 0.1972287 0.03261898 0.1795422
## 36     36 7.427236 0.3091648 5.975035 0.1957071 0.03252185 0.1781542
## 37     37 7.424921 0.3096022 5.974007 0.1939252 0.03228963 0.1749930
## 38     38 7.425013 0.3096528 5.973786 0.1927712 0.03233700 0.1751939
## 39     39 7.429927 0.3088347 5.978202 0.1949028 0.03288434 0.1762552
## 40     40 7.433224 0.3083147 5.980247 0.1966553 0.03278040 0.1777586
## 41     41 7.435000 0.3080562 5.983696 0.1956602 0.03345355 0.1747626
## 42     42 7.437596 0.3075953 5.984304 0.1952054 0.03326415 0.1751041
## 43     43 7.437450 0.3076086 5.985777 0.1971093 0.03332401 0.1769541
## 44     44 7.439724 0.3071832 5.988489 0.1973279 0.03249548 0.1772975
## 45     45 7.444587 0.3062874 5.991143 0.1929155 0.03202202 0.1722841
## 46     46 7.446435 0.3059818 5.993523 0.1923556 0.03230585 0.1714371
## 47     47 7.445088 0.3061662 5.990944 0.1907561 0.03119105 0.1671542
## 48     48 7.444733 0.3062917 5.992794 0.1921053 0.03110801 0.1703214
## 49     49 7.447215 0.3059060 5.992315 0.1923540 0.03179091 0.1720461
## 50     50 7.454515 0.3046378 5.997594 0.1954035 0.03100057 0.1717813
## 51     51 7.459043 0.3038129 6.000984 0.1959493 0.03043160 0.1714843
## 52     52 7.457689 0.3040567 5.999973 0.1950412 0.02999058 0.1732763
## 53     53 7.460161 0.3036438 6.002046 0.1941133 0.02996394 0.1749587
## 54     54 7.458665 0.3039021 5.998856 0.1957932 0.03018665 0.1765836
## 55     55 7.458525 0.3039893 5.998818 0.1946610 0.03073146 0.1765882
## 56     56 7.458495 0.3040066 6.000316 0.1925586 0.03111565 0.1750025
## 57     57 7.461618 0.3034921 6.002628 0.1908995 0.03107889 0.1734092
## 58     58 7.461668 0.3035251 6.003637 0.1910577 0.03114920 0.1736469
## 59     59 7.462687 0.3033887 6.002845 0.1938016 0.03126947 0.1743759
## 60     60 7.464981 0.3029931 6.004503 0.1971868 0.03167026 0.1765744
## 61     61 7.464731 0.3030484 6.004223 0.1954055 0.03138997 0.1764833
## 62     62 7.467226 0.3026794 6.006139 0.1944846 0.03146926 0.1769925
## 63     63 7.466802 0.3027368 6.008022 0.1943179 0.03108656 0.1753396
## 64     64 7.465766 0.3028847 6.007578 0.1936747 0.03099566 0.1753070
## 65     65 7.466286 0.3028429 6.009350 0.1970834 0.03117439 0.1789336
## 66     66 7.465028 0.3030599 6.008531 0.1949815 0.03119748 0.1779753
## 67     67 7.464328 0.3031795 6.010289 0.1926638 0.03097459 0.1755227
## 68     68 7.462007 0.3035653 6.008786 0.1854068 0.03008338 0.1699636
## 69     69 7.462793 0.3034408 6.008969 0.1835349 0.02976806 0.1672234
## 70     70 7.462629 0.3034568 6.006997 0.1797423 0.02935917 0.1634003
## 71     71 7.462134 0.3035717 6.007213 0.1799822 0.02985786 0.1621585
## 72     72 7.462247 0.3035738 6.005950 0.1799824 0.02957842 0.1620222
## 73     73 7.463903 0.3032994 6.005818 0.1797476 0.02890039 0.1615549
## 74     74 7.465092 0.3031077 6.006152 0.1816170 0.02937457 0.1630813
## 75     75 7.463993 0.3033324 6.005917 0.1784448 0.02935526 0.1594180
## 76     76 7.463569 0.3034421 6.007364 0.1774970 0.02936867 0.1588056
## 77     77 7.462535 0.3036206 6.006788 0.1762982 0.02906282 0.1583425
## 78     78 7.462935 0.3035315 6.006244 0.1737701 0.02888051 0.1567091
## 79     79 7.464049 0.3033087 6.008205 0.1748559 0.02874236 0.1580091
## 80     80 7.463758 0.3033579 6.008287 0.1770595 0.02830228 0.1612032
## 81     81 7.463490 0.3034136 6.007967 0.1757702 0.02853463 0.1604572
## 82     82 7.463714 0.3033760 6.009262 0.1747184 0.02833203 0.1584868
## 83     83 7.463469 0.3034136 6.010350 0.1742840 0.02857277 0.1582699
## 84     84 7.465807 0.3030160 6.011713 0.1760806 0.02881884 0.1622793
## 85     85 7.467805 0.3026852 6.012519 0.1762462 0.02877271 0.1624106
## 86     86 7.470393 0.3022776 6.015897 0.1743797 0.02899617 0.1609173
## 87     87 7.472144 0.3019899 6.016538 0.1746394 0.02888909 0.1622009
## 88     88 7.476761 0.3011887 6.020658 0.1751178 0.02875329 0.1645592
## 89     89 7.476717 0.3012118 6.020685 0.1728937 0.02852949 0.1617514
## 90     90 7.478548 0.3008816 6.021269 0.1712955 0.02819678 0.1616327
## 91     91 7.480706 0.3005357 6.023770 0.1685096 0.02837716 0.1577960
## 92     92 7.481694 0.3003920 6.023754 0.1684273 0.02843920 0.1580735
## 93     93 7.480452 0.3006354 6.020983 0.1674590 0.02843810 0.1571993
## 94     94 7.481129 0.3005364 6.021736 0.1659882 0.02789137 0.1550889
## 95     95 7.477560 0.3011886 6.019449 0.1658501 0.02760519 0.1544966
## 96     96 7.476061 0.3014778 6.019244 0.1657852 0.02813452 0.1539826
## 97     97 7.477624 0.3012274 6.020994 0.1667465 0.02793112 0.1546537
## 98     98 7.478794 0.3010609 6.022099 0.1687038 0.02805873 0.1572810
## 99     99 7.478852 0.3010852 6.021116 0.1695024 0.02814192 0.1591237
## 100   100 7.479255 0.3010390 6.021474 0.1714964 0.02857132 0.1586523
## 101   101 7.482112 0.3005906 6.022739 0.1747651 0.02903290 0.1611855
## 102   102 7.483790 0.3003332 6.024148 0.1740723 0.02916167 0.1610266
## 103   103 7.482890 0.3005193 6.024295 0.1736236 0.02859756 0.1605440
## 104   104 7.482110 0.3006672 6.023661 0.1740576 0.02861456 0.1591839
## 105   105 7.481867 0.3007146 6.024307 0.1750201 0.02887227 0.1608214
## 106   106 7.481346 0.3008175 6.022547 0.1759497 0.02863438 0.1598421
## 107   107 7.483196 0.3004574 6.024145 0.1776078 0.02870464 0.1620256
## 108   108 7.482300 0.3006477 6.022300 0.1784129 0.02882034 0.1626385
## 109   109 7.485256 0.3001719 6.024954 0.1796772 0.02914643 0.1628090
## 110   110 7.483674 0.3004171 6.025128 0.1799563 0.02877608 0.1627745
## 111   111 7.484664 0.3002268 6.027047 0.1787327 0.02869946 0.1614784
## 112   112 7.483446 0.3004325 6.026097 0.1772230 0.02871677 0.1590364
## 113   113 7.484188 0.3003349 6.026473 0.1758197 0.02847121 0.1564457
## 114   114 7.484664 0.3002658 6.026437 0.1739866 0.02806601 0.1535782
## 115   115 7.485659 0.3001148 6.027212 0.1765701 0.02803581 0.1574654
## 116   116 7.484199 0.3003784 6.026175 0.1801506 0.02858137 0.1600078
## 117   117 7.484125 0.3004072 6.025927 0.1807602 0.02894758 0.1609988
## 118   118 7.484993 0.3002656 6.027299 0.1817604 0.02890271 0.1622072
## 119   119 7.483047 0.3006050 6.026290 0.1809712 0.02872918 0.1612230
## 120   120 7.483482 0.3005433 6.026403 0.1799095 0.02876003 0.1606892
## 121   121 7.482671 0.3006940 6.025047 0.1805174 0.02891212 0.1606966
## 122   122 7.483010 0.3006481 6.024836 0.1797293 0.02885267 0.1611424
## 123   123 7.482224 0.3007689 6.023967 0.1794378 0.02893466 0.1608920
## 124   124 7.481836 0.3008573 6.022704 0.1802692 0.02857173 0.1619554
## 125   125 7.483816 0.3005040 6.023753 0.1801832 0.02847614 0.1616379
## 126   126 7.482962 0.3006794 6.022703 0.1814617 0.02866341 0.1633429
## 127   127 7.481630 0.3009145 6.020357 0.1818332 0.02883275 0.1638307
## 128   128 7.482389 0.3008259 6.021013 0.1801535 0.02895334 0.1617400
## 129   129 7.480541 0.3011358 6.019374 0.1784274 0.02897371 0.1600232
## 130   130 7.483362 0.3006449 6.021410 0.1791271 0.02913294 0.1608850
## 131   131 7.482803 0.3007455 6.021288 0.1785396 0.02893092 0.1610388
## 132   132 7.481532 0.3009688 6.019990 0.1776212 0.02894963 0.1613180
## 133   133 7.481698 0.3009509 6.020429 0.1792799 0.02884122 0.1627747
## 134   134 7.480017 0.3012253 6.019205 0.1787937 0.02898496 0.1622940
## 135   135 7.481661 0.3009406 6.019760 0.1787311 0.02882508 0.1624143
## 136   136 7.482611 0.3007951 6.020850 0.1786075 0.02881681 0.1618344
## 137   137 7.483468 0.3006494 6.021042 0.1782447 0.02877637 0.1613787
## 138   138 7.482659 0.3008218 6.020555 0.1787450 0.02884274 0.1619799
## 139   139 7.482215 0.3008986 6.019897 0.1783934 0.02896343 0.1613003
## 140   140 7.483830 0.3006117 6.020355 0.1805303 0.02910175 0.1628473
## 141   141 7.482191 0.3008989 6.020230 0.1801323 0.02910212 0.1622470
## 142   142 7.480697 0.3011547 6.019394 0.1795645 0.02902624 0.1621871
## 143   143 7.480458 0.3012480 6.018294 0.1797042 0.02937277 0.1614369
## 144   144 7.479869 0.3013593 6.017455 0.1787067 0.02939728 0.1603836
## 145   145 7.479976 0.3013604 6.018339 0.1805318 0.02950715 0.1618075
## 146   146 7.480024 0.3013568 6.017801 0.1803560 0.02928101 0.1611418
## 147   147 7.480107 0.3013416 6.017781 0.1783346 0.02910309 0.1599139
## 148   148 7.479734 0.3014082 6.017579 0.1785546 0.02928448 0.1592946
## 149   149 7.479791 0.3014166 6.016904 0.1797133 0.02926474 0.1606923
## 150   150 7.479542 0.3014304 6.016467 0.1803148 0.02915735 0.1616442
## 151   151 7.479575 0.3014218 6.016125 0.1792466 0.02923373 0.1606687
## 152   152 7.478769 0.3015707 6.015533 0.1802560 0.02947013 0.1621583
## 153   153 7.477732 0.3017484 6.014539 0.1810255 0.02942017 0.1622899
## 154   154 7.478595 0.3016044 6.015158 0.1825205 0.02956332 0.1627891
## 155   155 7.479020 0.3015506 6.015378 0.1822478 0.02966981 0.1622843
## 156   156 7.479629 0.3014638 6.016775 0.1828076 0.02957750 0.1633460
## 157   157 7.480843 0.3012895 6.017028 0.1827252 0.02979611 0.1632318
## 158   158 7.481197 0.3012458 6.017609 0.1833259 0.02995381 0.1629606
## 159   159 7.480643 0.3013310 6.017447 0.1843135 0.02990201 0.1647745
## 160   160 7.480214 0.3014178 6.016711 0.1843823 0.02996034 0.1637489
## 161   161 7.481125 0.3012677 6.017056 0.1843600 0.03000321 0.1638921
## 162   162 7.480700 0.3013204 6.016865 0.1846245 0.02977113 0.1637432
## 163   163 7.481151 0.3012515 6.017233 0.1841268 0.02970776 0.1638990
## 164   164 7.481795 0.3011460 6.017561 0.1836578 0.02963372 0.1640277
## 165   165 7.480725 0.3013193 6.016279 0.1845021 0.02970229 0.1634430
## 166   166 7.482433 0.3010411 6.017500 0.1854936 0.02993450 0.1636793
## 167   167 7.481885 0.3011517 6.016891 0.1844574 0.02998097 0.1622699
## 168   168 7.480971 0.3013090 6.016889 0.1848040 0.03012414 0.1626239
## 169   169 7.481340 0.3012496 6.017076 0.1838363 0.03000969 0.1613722
## 170   170 7.480510 0.3013769 6.016831 0.1831698 0.02994864 0.1601393
## 171   171 7.480057 0.3014375 6.015610 0.1834646 0.02991675 0.1601815
## 172   172 7.480177 0.3014369 6.015224 0.1833118 0.02989406 0.1599121
## 173   173 7.480622 0.3013630 6.015025 0.1824693 0.02988696 0.1593063
## 174   174 7.480554 0.3013910 6.015201 0.1823655 0.02988189 0.1598790
## 175   175 7.480668 0.3013549 6.014823 0.1824456 0.02981618 0.1605595
## 176   176 7.481786 0.3011552 6.015112 0.1818679 0.02969456 0.1603792
## 177   177 7.482593 0.3010105 6.015362 0.1812401 0.02959799 0.1601268
## 178   178 7.483822 0.3007954 6.016100 0.1817021 0.02951033 0.1600102
## 179   179 7.484877 0.3006134 6.017150 0.1808875 0.02932040 0.1602670
## 180   180 7.484898 0.3006199 6.017168 0.1816937 0.02941835 0.1609758
## 181   181 7.485763 0.3004812 6.017115 0.1823299 0.02956303 0.1615338
## 182   182 7.485404 0.3005351 6.017368 0.1832483 0.02975653 0.1620426
## 183   183 7.484354 0.3007119 6.016975 0.1837193 0.02970635 0.1626210
## 184   184 7.485183 0.3005817 6.018041 0.1824799 0.02973094 0.1612029
## 185   185 7.485048 0.3005934 6.017920 0.1830404 0.02973858 0.1619881
## 186   186 7.485073 0.3005814 6.018437 0.1829973 0.02980301 0.1617574
## 187   187 7.484021 0.3007685 6.017982 0.1825476 0.02971558 0.1614150
## 188   188 7.483921 0.3007873 6.018009 0.1822752 0.02968734 0.1614929
## 189   189 7.484521 0.3006749 6.017884 0.1829456 0.02962028 0.1617071
## 190   190 7.483636 0.3008212 6.017520 0.1831152 0.02968078 0.1614157
## 191   191 7.483409 0.3008699 6.017511 0.1832174 0.02965141 0.1617752
## 192   192 7.483465 0.3008526 6.017611 0.1819918 0.02947164 0.1606960
## 193   193 7.483885 0.3007819 6.018391 0.1815606 0.02929518 0.1605203
## 194   194 7.483362 0.3008833 6.018452 0.1814657 0.02928589 0.1606683
## 195   195 7.483432 0.3008634 6.018303 0.1811837 0.02913594 0.1604291
## 196   196 7.483396 0.3008739 6.018367 0.1816608 0.02915778 0.1613072
## 197   197 7.483461 0.3008642 6.018659 0.1815574 0.02911527 0.1613200
## 198   198 7.483013 0.3009407 6.018075 0.1815177 0.02914239 0.1612210
## 199   199 7.483845 0.3007964 6.018840 0.1808738 0.02907523 0.1608688
## 200   200 7.483382 0.3008748 6.018817 0.1809624 0.02901449 0.1606217
## 201   201 7.483404 0.3008824 6.018834 0.1811594 0.02910871 0.1607222
## 202   202 7.483452 0.3008719 6.018984 0.1815288 0.02902675 0.1608282
## 203   203 7.483470 0.3008647 6.019293 0.1814845 0.02895399 0.1610469
## 204   204 7.483601 0.3008427 6.019563 0.1815816 0.02890602 0.1615091
## 205   205 7.483740 0.3008149 6.019724 0.1818313 0.02899339 0.1616071
## 206   206 7.483537 0.3008553 6.019279 0.1821229 0.02892115 0.1615915
## 207   207 7.483901 0.3007908 6.019490 0.1822011 0.02901238 0.1614157
## 208   208 7.484019 0.3007693 6.019467 0.1823397 0.02909964 0.1615723
## 209   209 7.483540 0.3008500 6.018840 0.1826541 0.02908732 0.1618458
## 210   210 7.483971 0.3007809 6.019545 0.1827883 0.02914808 0.1619694
## 211   211 7.484121 0.3007603 6.019852 0.1823399 0.02909640 0.1616234
## 212   212 7.484250 0.3007415 6.020120 0.1825087 0.02913662 0.1617496
## 213   213 7.484379 0.3007170 6.020145 0.1822955 0.02907863 0.1615959
## 214   214 7.484604 0.3006784 6.020415 0.1825382 0.02909632 0.1615204
## 215   215 7.485048 0.3006065 6.020643 0.1829020 0.02904780 0.1617774
## 216   216 7.485361 0.3005550 6.021120 0.1822924 0.02901831 0.1615135
## 217   217 7.485561 0.3005281 6.021225 0.1819005 0.02904631 0.1612285
## 218   218 7.485676 0.3005104 6.021167 0.1817411 0.02897691 0.1611274
## 219   219 7.485472 0.3005418 6.020945 0.1820197 0.02898308 0.1614550
## 220   220 7.485475 0.3005451 6.021077 0.1817088 0.02894589 0.1611838
## 221   221 7.485310 0.3005733 6.021241 0.1816350 0.02895481 0.1611551
## 222   222 7.485330 0.3005696 6.021271 0.1815238 0.02892491 0.1613296
## 223   223 7.485533 0.3005342 6.021232 0.1815954 0.02897600 0.1614133
## 224   224 7.485393 0.3005600 6.021279 0.1816679 0.02897485 0.1615359
## 225   225 7.485507 0.3005400 6.021474 0.1814428 0.02895719 0.1614197
## 226   226 7.485774 0.3004966 6.021803 0.1813580 0.02901401 0.1612259
## 227   227 7.485958 0.3004613 6.022085 0.1812933 0.02899000 0.1609602
## 228   228 7.485903 0.3004686 6.022126 0.1813543 0.02896481 0.1611853
## 229   229 7.485865 0.3004746 6.022094 0.1813131 0.02895796 0.1611545
## 230   230 7.485790 0.3004865 6.022059 0.1810982 0.02895731 0.1609416
## 231   231 7.485821 0.3004832 6.021956 0.1811233 0.02894285 0.1608900
## 232   232 7.485821 0.3004816 6.021906 0.1812322 0.02891255 0.1609931
## 233   233 7.485961 0.3004561 6.022020 0.1813644 0.02891960 0.1609937
## 234   234 7.486026 0.3004439 6.022105 0.1813039 0.02891242 0.1609771
## 235   235 7.486028 0.3004447 6.022125 0.1813252 0.02891942 0.1610493
## 236   236 7.486036 0.3004439 6.022118 0.1812701 0.02891840 0.1610519
## 237   237 7.486065 0.3004386 6.022129 0.1812642 0.02891534 0.1610839
## 238   238 7.486033 0.3004442 6.022107 0.1812514 0.02891806 0.1610742
## 239   239 7.486022 0.3004460 6.022095 0.1812484 0.02891771 0.1610637
## 240   240 7.486026 0.3004453 6.022099 0.1812374 0.02891745 0.1610546
##    nvmax
## 26    26

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  8.775478e+01 -1.571182e-02  3.407441e+00  1.528822e-01  9.308719e-01 
##           x10           x11           x16           x17           x21 
##  4.672910e-01  5.305842e+07  2.971064e-01  4.228913e-01  2.603049e-02 
##         stat4         stat6        stat14        stat23        stat33 
## -1.599196e-01 -1.414834e-01 -2.568627e-01  1.404007e-01 -1.696432e-01 
##        stat35        stat38        stat41        stat50        stat98 
## -1.425793e-01  1.920227e-01 -1.584774e-01  1.373511e-01  9.462024e-01 
##       stat100       stat110       stat144       stat149       stat156 
##  1.862947e-01 -9.004465e-01  1.464019e-01 -1.907112e-01  2.131350e-01 
##       stat172      sqrt.x18 
##  1.709260e-01  7.329063e+00

Test

# Evaluate the CV-selected backward-elimination (leapBackward) model on the
# held-out test set. `id` carries the run identifier returned by the caret
# training chunk; draw.limits = TRUE adds prediction-limit lines to the plot.
if (algo.backward.caret) {
  test.model(model.backward, data.test,
             method = "leapBackward", subopt = NULL,
             formula = formula,
             feature.names = feature.names, label.names = label.names,
             id = id,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   106.0   120.8   124.2   124.2   128.0   139.0 
## [1] "leapBackward  Test MSE: 94.0858683870622"

Stepwise Selection (w/ full train)

Train

# Stepwise (both-direction) model selection on the FULL training set,
# searching from the null model up to the scope of the full model.
if (algo.stepwise) {
  t1 <- Sys.time()

  # trace = 0 suppresses step()'s per-iteration console output.
  model.stepwise <- step(model.null, scope = list(upper = model.full),
                         data = data.train, direction = "both", trace = 0)
  print(summary(model.stepwise))

  t2 <- Sys.time()
  # format() keeps the difftime units (secs/mins) in the printed message.
  print(paste0("Time taken for Stepwise Selection: ", format(t2 - t1)))

  plot.diagnostics(model.stepwise, data.train)
}

Test

# Evaluate the stepwise-selected model (full training data) on the test set.
if (algo.stepwise) {
  test.model(model.stepwise, data.test, "Stepwise Selection")
}

Stepwise Selection (w/ filtered train)

Train

# Stepwise (both-direction) model selection on the FILTERED training set
# (the "2" variants of the null/full models and data).
if (algo.stepwise) {
  t1 <- Sys.time()

  # trace = 0 suppresses step()'s per-iteration console output.
  model.stepwise2 <- step(model.null2, scope = list(upper = model.full2),
                          data = data.train2, direction = "both", trace = 0)
  print(summary(model.stepwise2))

  t2 <- Sys.time()
  # format() keeps the difftime units (secs/mins) in the printed message.
  print(paste0("Time taken for Stepwise Selection: ", format(t2 - t1)))

  plot.diagnostics(model.stepwise2, data.train2)
}

Test

# Evaluate the stepwise-selected model (filtered training data) on the test set.
if (algo.stepwise) {
  test.model(model.stepwise2, data.test, "Stepwise Selection (2)")
}

Stepwise Selection with CV (w/ full train)

Train

# Cross-validated stepwise subset selection ("leapSeq") via the project's
# caret wrapper. Overwrites model.stepwise with the CV-tuned model.
if (algo.stepwise.caret) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train,
                                    method = "leapSeq",
                                    feature.names = feature.names)
  model.stepwise <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 9 on full training set
##     nvmax      RMSE  Rsquared      MAE    RMSESD RsquaredSD     MAESD
## 1       1 10.210693 0.1018765 7.806589 0.3486064 0.02595312 0.1962709
## 2       2  9.979515 0.1418363 7.597398 0.3207096 0.02875352 0.1777050
## 3       3  9.814897 0.1693658 7.438338 0.3290643 0.02785753 0.1807549
## 4       4  9.667493 0.1938902 7.231990 0.3201540 0.02682717 0.1803033
## 5       5  9.588875 0.2068466 7.172167 0.3419604 0.02484619 0.1975346
## 6       6  9.591660 0.2066208 7.174795 0.3521685 0.02739708 0.2020679
## 7       7  9.571737 0.2099116 7.168707 0.3525200 0.02833739 0.2076171
## 8       8  9.546348 0.2140867 7.150908 0.3571826 0.02810121 0.2058362
## 9       9  9.521497 0.2180754 7.135342 0.3515932 0.02753943 0.1919503
## 10     10  9.531634 0.2164981 7.142145 0.3502059 0.02798899 0.1947027
## 11     11  9.531499 0.2165176 7.142129 0.3493156 0.02735269 0.1916070
## 12     12  9.541275 0.2149978 7.148145 0.3511383 0.02743081 0.1879036
## 13     13  9.539867 0.2151993 7.145155 0.3456114 0.02644858 0.1873780
## 14     14  9.532611 0.2163848 7.137358 0.3470188 0.02761253 0.1915646
## 15     15  9.525866 0.2174740 7.126506 0.3478493 0.02822187 0.1955673
## 16     16  9.532841 0.2163902 7.129214 0.3525212 0.02855943 0.1972010
## 17     17  9.534851 0.2160552 7.129357 0.3505236 0.02748267 0.1945657
## 18     18  9.647482 0.1976113 7.222395 0.5634864 0.05809294 0.3731880
## 19     19  9.535883 0.2159502 7.135435 0.3475181 0.02829213 0.1958929
## 20     20  9.537824 0.2156687 7.135493 0.3478763 0.02871909 0.1898675
## 21     21  9.536909 0.2159019 7.132231 0.3588146 0.02965921 0.1943078
## 22     22  9.545095 0.2146063 7.140942 0.3574051 0.02982239 0.1961330
## 23     23  9.545275 0.2145559 7.138831 0.3525479 0.02889786 0.1906030
## 24     24  9.717224 0.1836663 7.281704 0.3765159 0.07388510 0.2996599
## 25     25  9.557141 0.2127138 7.149969 0.3600224 0.02911233 0.1922357
## 26     26  9.657701 0.1951223 7.242387 0.4177494 0.05878111 0.2501277
## 27     27  9.644232 0.1981933 7.248369 0.5398596 0.05884316 0.3825956
## 28     28  9.671118 0.1930862 7.249987 0.4163837 0.05811065 0.2496969
## 29     29  9.660115 0.1957328 7.256034 0.5328809 0.05786791 0.3778188
## 30     30  9.794855 0.1715607 7.341636 0.5014443 0.07653227 0.3526236
## 31     31  9.687563 0.1904735 7.265854 0.3524549 0.05356438 0.2561924
## 32     32  9.600078 0.2063063 7.183936 0.3681083 0.02950020 0.2058563
## 33     33  9.832761 0.1669749 7.402819 0.5658021 0.06568036 0.4382143
## 34     34  9.696713 0.1899830 7.259124 0.4381303 0.04971971 0.3112667
## 35     35  9.676930 0.1924304 7.234979 0.4005574 0.05922545 0.2657224
## 36     36  9.774703 0.1755089 7.343104 0.4179294 0.06558233 0.2740389
## 37     37  9.857103 0.1625069 7.399391 0.5171231 0.06946431 0.3631828
## 38     38  9.716166 0.1869891 7.283507 0.3289277 0.03682744 0.2674317
## 39     39  9.625348 0.2025005 7.201939 0.3564929 0.02785892 0.2126164
## 40     40  9.721902 0.1861499 7.290145 0.3277698 0.03701839 0.2699830
## 41     41  9.723392 0.1859487 7.292611 0.3247149 0.03584246 0.2679377
## 42     42  9.717711 0.1868936 7.278478 0.4324117 0.04838245 0.3080909
## 43     43  9.710256 0.1875095 7.283628 0.3831264 0.04760399 0.2559135
## 44     44  9.645711 0.1994672 7.218879 0.3610519 0.02744794 0.2109379
## 45     45  9.647598 0.1992497 7.225484 0.3565794 0.02726203 0.2118465
## 46     46  9.734595 0.1844265 7.294750 0.4279132 0.04825293 0.3066137
## 47     47  9.723054 0.1865137 7.299665 0.5154828 0.05425497 0.3675418
## 48     48  9.655069 0.1981808 7.227297 0.3558743 0.02826220 0.2144426
## 49     49  9.835732 0.1662450 7.398791 0.3731696 0.06691250 0.2673416
## 50     50  9.787152 0.1750603 7.349467 0.4611299 0.05724995 0.3195169
## 51     51  9.849494 0.1658094 7.389141 0.4960428 0.05263783 0.3739671
## 52     52  9.756176 0.1808342 7.314441 0.4061820 0.05499754 0.2496784
## 53     53  9.831005 0.1679537 7.375453 0.5393295 0.06236499 0.3689008
## 54     54  9.755043 0.1814820 7.311708 0.4294747 0.04861528 0.3078929
## 55     55  9.676824 0.1950033 7.251022 0.3571696 0.02741171 0.2117132
## 56     56  9.829988 0.1687042 7.404419 0.4760534 0.05426128 0.3804220
## 57     57  9.750295 0.1824792 7.299991 0.4607139 0.05059320 0.3137337
## 58     58  9.891144 0.1583930 7.431578 0.4738673 0.05649229 0.3700848
## 59     59  9.828104 0.1697498 7.365790 0.5919676 0.06038724 0.3976078
## 60     60  9.764688 0.1801452 7.329783 0.3207330 0.03417814 0.2563213
## 61     61  9.897464 0.1563062 7.467153 0.4580327 0.05911674 0.3579670
## 62     62  9.767811 0.1790226 7.344051 0.3487030 0.05202713 0.2608407
## 63     63  9.749721 0.1826901 7.328246 0.4998323 0.05157607 0.3503903
## 64     64  9.748426 0.1823902 7.300094 0.3970540 0.05660611 0.2579311
## 65     65  9.780192 0.1782103 7.325805 0.5339025 0.05227522 0.3560707
## 66     66  9.840612 0.1658720 7.412150 0.3542917 0.06150612 0.2654338
## 67     67  9.831618 0.1682829 7.359183 0.4450118 0.06537331 0.3230012
## 68     68  9.689276 0.1934971 7.258654 0.3575813 0.02740414 0.2105906
## 69     69  9.890378 0.1582400 7.409822 0.5027955 0.07046284 0.3646972
## 70     70  9.686359 0.1939752 7.256419 0.3586914 0.02777221 0.2084844
## 71     71  9.840229 0.1665679 7.401339 0.3250981 0.04656283 0.2625258
## 72     72  9.778365 0.1782843 7.340713 0.3230135 0.03445577 0.2583162
## 73     73  9.930812 0.1523014 7.433985 0.5793714 0.07510932 0.4180765
## 74     74  9.865865 0.1626748 7.433214 0.2858501 0.05283743 0.2735927
## 75     75  9.886066 0.1596422 7.437647 0.6211988 0.07038555 0.4524209
## 76     76  9.692816 0.1931202 7.260681 0.3576133 0.02727025 0.2061332
## 77     77 10.000591 0.1391080 7.546188 0.4304241 0.07455112 0.3710366
## 78     78  9.835177 0.1682056 7.362040 0.4517959 0.06718730 0.3265192
## 79     79  9.844877 0.1663995 7.386900 0.3476859 0.05835906 0.2890495
## 80     80  9.844261 0.1676607 7.383627 0.5948055 0.06148588 0.4065115
## 81     81  9.774349 0.1792229 7.319887 0.4276916 0.04905973 0.2916783
## 82     82  9.693152 0.1931613 7.262203 0.3492307 0.02645065 0.1994268
## 83     83 10.071883 0.1277877 7.598961 0.6379805 0.08436497 0.4653148
## 84     84  9.855391 0.1655772 7.384057 0.5015271 0.06118019 0.3611131
## 85     85  9.795960 0.1763722 7.338596 0.5404296 0.05292698 0.3572979
## 86     86  9.862577 0.1632011 7.421981 0.3739732 0.06608772 0.2576548
## 87     87  9.761136 0.1817250 7.334105 0.5151351 0.05386527 0.3701072
## 88     88  9.757186 0.1813037 7.324867 0.3788432 0.04662194 0.2410340
## 89     89  9.772962 0.1797818 7.328013 0.4557305 0.05029645 0.3133496
## 90     90  9.939313 0.1508687 7.454678 0.5054536 0.07020713 0.3614883
## 91     91  9.761461 0.1811726 7.312079 0.3982146 0.05761853 0.2621458
## 92     92 10.030463 0.1360867 7.540274 0.5564309 0.06130058 0.4292424
## 93     93  9.787943 0.1768506 7.362820 0.3516935 0.05426362 0.2597708
## 94     94  9.871177 0.1623229 7.421083 0.3416119 0.05319531 0.2629935
## 95     95  9.913995 0.1551337 7.450939 0.5361369 0.07026933 0.3732416
## 96     96  9.695032 0.1930889 7.266328 0.3525717 0.02756996 0.2030230
## 97     97  9.774232 0.1797251 7.329896 0.4626252 0.05225970 0.3219442
## 98     98  9.893412 0.1576035 7.448936 0.5423576 0.07814594 0.4007313
## 99     99  9.849349 0.1658752 7.398554 0.3451775 0.05897299 0.2934096
## 100   100  9.773929 0.1797501 7.330873 0.4592287 0.05185824 0.3212672
## 101   101  9.773781 0.1790262 7.332116 0.3893570 0.05233276 0.2315125
## 102   102  9.900030 0.1572896 7.430891 0.5065422 0.07254343 0.3745622
## 103   103  9.759151 0.1809886 7.329962 0.3720960 0.04717366 0.2405385
## 104   104  9.822466 0.1713740 7.392299 0.5827497 0.06377489 0.4243202
## 105   105  9.692485 0.1935218 7.263099 0.3479311 0.02714981 0.2008952
## 106   106  9.751788 0.1832182 7.315561 0.4463723 0.04519888 0.2951149
## 107   107  9.697418 0.1928149 7.266630 0.3515465 0.02691013 0.2054843
## 108   108  9.777965 0.1790130 7.323488 0.4268141 0.04912295 0.2979911
## 109   109  9.935831 0.1527820 7.454163 0.6466476 0.07025572 0.4644751
## 110   110  9.780302 0.1783284 7.330279 0.3932594 0.05278178 0.2401388
## 111   111  9.893726 0.1588365 7.441268 0.5939926 0.07713887 0.4413163
## 112   112  9.783504 0.1783068 7.329216 0.4227898 0.04892593 0.2959643
## 113   113  9.954299 0.1486873 7.494710 0.4461486 0.07443988 0.3364205
## 114   114  9.701158 0.1924194 7.268522 0.3417138 0.02602544 0.2016328
## 115   115  9.850239 0.1658566 7.390447 0.4231732 0.05885308 0.3064241
## 116   116  9.701022 0.1924449 7.265780 0.3392646 0.02614051 0.1988495
## 117   117 10.052876 0.1330077 7.540090 0.6098548 0.07497737 0.4464442
## 118   118  9.865622 0.1645435 7.439502 0.5138075 0.07024533 0.4060229
## 119   119  9.701963 0.1923261 7.267226 0.3378624 0.02598477 0.1966654
## 120   120  9.779066 0.1789664 7.348713 0.2997716 0.03224976 0.2558880
## 121   121  9.781603 0.1786583 7.349546 0.3007157 0.03227666 0.2534222
## 122   122  9.873110 0.1610296 7.442391 0.3335004 0.06450190 0.2502558
## 123   123  9.849421 0.1667454 7.404453 0.3109422 0.04565980 0.2519778
## 124   124  9.703076 0.1922369 7.272107 0.3360867 0.02644129 0.1941471
## 125   125  9.703337 0.1922342 7.273153 0.3377555 0.02660750 0.1954605
## 126   126  9.749222 0.1836123 7.315994 0.3649733 0.04946376 0.2537729
## 127   127  9.741367 0.1854247 7.305778 0.4020450 0.03761414 0.2533949
## 128   128  9.850252 0.1672402 7.407681 0.4666898 0.05528211 0.3284501
## 129   129  9.863339 0.1632330 7.420674 0.4200658 0.06304436 0.3080222
## 130   130 10.013132 0.1375602 7.562646 0.2938597 0.06181808 0.2514869
## 131   131  9.741807 0.1853440 7.308655 0.4061928 0.03855058 0.2545514
## 132   132  9.811918 0.1732964 7.349678 0.4084036 0.04848722 0.2557072
## 133   133  9.809779 0.1733993 7.379254 0.3028879 0.04332713 0.2509722
## 134   134  9.808822 0.1735905 7.385087 0.2797429 0.03270282 0.2138170
## 135   135  9.700595 0.1927172 7.272141 0.3392941 0.02626019 0.1960024
## 136   136  9.847892 0.1661019 7.403167 0.3184326 0.05670888 0.2104035
## 137   137  9.776589 0.1789378 7.348318 0.3842219 0.04041236 0.2373584
## 138   138  9.701426 0.1926678 7.272380 0.3400900 0.02662710 0.1966177
## 139   139  9.832061 0.1711776 7.366421 0.4961808 0.04741722 0.3239161
## 140   140  9.740957 0.1852461 7.312324 0.3555060 0.04482339 0.2409866
## 141   141  9.735062 0.1866462 7.299774 0.3860929 0.03519673 0.2326746
## 142   142  9.703176 0.1924256 7.273295 0.3356725 0.02643224 0.1913767
## 143   143  9.742979 0.1849784 7.314233 0.3525888 0.04445209 0.2375254
## 144   144  9.765043 0.1817362 7.334227 0.3168716 0.04169471 0.1961039
## 145   145  9.705307 0.1921773 7.276121 0.3356178 0.02671873 0.1895523
## 146   146  9.755925 0.1840724 7.338725 0.4520656 0.04304802 0.3328733
## 147   147  9.775013 0.1802629 7.345276 0.2929510 0.02889300 0.2211458
## 148   148  9.753636 0.1839108 7.316781 0.3603562 0.03445509 0.2383081
## 149   149  9.800031 0.1760471 7.391642 0.4449877 0.04737184 0.3293616
## 150   150  9.748628 0.1842598 7.327611 0.3326971 0.03513098 0.1983941
## 151   151  9.706870 0.1919910 7.277071 0.3382122 0.02706506 0.1892865
## 152   152  9.757459 0.1828033 7.311885 0.3458595 0.03930539 0.1871026
## 153   153  9.704477 0.1923266 7.275587 0.3382009 0.02706117 0.1907077
## 154   154  9.772198 0.1811246 7.328147 0.4228137 0.04461907 0.2795167
## 155   155  9.768658 0.1818517 7.320530 0.4414057 0.03645477 0.2676989
## 156   156  9.767250 0.1814688 7.335802 0.3189711 0.04230599 0.1987931
## 157   157  9.822920 0.1720471 7.384154 0.3127435 0.03408248 0.2603302
## 158   158  9.851346 0.1674205 7.385357 0.4889294 0.04452955 0.3208031
## 159   159  9.770662 0.1815350 7.320686 0.4451912 0.03677697 0.2712851
## 160   160  9.703530 0.1924203 7.272036 0.3406528 0.02676740 0.1911692
## 161   161  9.768353 0.1812838 7.334101 0.3203554 0.04239639 0.1985483
## 162   162  9.768602 0.1818484 7.317724 0.4457636 0.03691740 0.2711759
## 163   163  9.737757 0.1863253 7.297423 0.3923978 0.03603293 0.2275220
## 164   164  9.770729 0.1813514 7.324596 0.4250056 0.04470633 0.2811342
## 165   165  9.769669 0.1817289 7.318516 0.4474700 0.03694440 0.2713981
## 166   166  9.703860 0.1924260 7.274302 0.3417772 0.02670273 0.1913416
## 167   167  9.832479 0.1701161 7.390410 0.2991708 0.03956703 0.2203344
## 168   168  9.703266 0.1925020 7.274114 0.3418229 0.02707552 0.1902636
## 169   169  9.769317 0.1817805 7.320543 0.4473617 0.03693371 0.2703031
## 170   170  9.767026 0.1816034 7.334321 0.3234128 0.04257129 0.1986012
## 171   171  9.751993 0.1847402 7.336796 0.4566331 0.04338052 0.3360787
## 172   172  9.839283 0.1703245 7.374352 0.5103196 0.04955599 0.3403145
## 173   173  9.817456 0.1729419 7.379632 0.4234971 0.04966061 0.2879792
## 174   174  9.700346 0.1929592 7.271243 0.3434638 0.02733004 0.1933120
## 175   175  9.840646 0.1691407 7.405489 0.2584515 0.04142277 0.2137386
## 176   176  9.815724 0.1734090 7.377172 0.4450340 0.04282132 0.2829553
## 177   177  9.883694 0.1622727 7.422592 0.4318052 0.04843592 0.2946384
## 178   178  9.703302 0.1925764 7.273377 0.3455844 0.02739900 0.1954381
## 179   179  9.767190 0.1817120 7.332937 0.3255151 0.04217534 0.1976953
## 180   180  9.749030 0.1843008 7.326404 0.3439113 0.03603598 0.2065651
## 181   181  9.862745 0.1649615 7.416957 0.3734161 0.05038234 0.2851458
## 182   182  9.704905 0.1923527 7.274961 0.3465582 0.02750230 0.1962965
## 183   183  9.748813 0.1843504 7.325827 0.3453018 0.03626261 0.2069183
## 184   184  9.704279 0.1924540 7.274862 0.3467134 0.02768583 0.1961520
## 185   185  9.815369 0.1740385 7.357137 0.5014352 0.04518725 0.3140779
## 186   186  9.705617 0.1922660 7.275475 0.3456350 0.02736520 0.1959619
## 187   187  9.894543 0.1601273 7.452993 0.3607078 0.04533346 0.2823943
## 188   188  9.781137 0.1795707 7.349705 0.3062776 0.03133338 0.2361290
## 189   189  9.749546 0.1847112 7.306970 0.4149401 0.03930423 0.2443050
## 190   190  9.705897 0.1921961 7.275586 0.3440982 0.02718558 0.1946897
## 191   191  9.754991 0.1843633 7.338171 0.4554987 0.04301294 0.3365069
## 192   192  9.808474 0.1743505 7.352347 0.4173092 0.04812185 0.2385322
## 193   193  9.708116 0.1918683 7.276533 0.3432657 0.02698029 0.1949423
## 194   194  9.821911 0.1726158 7.380334 0.4439018 0.04260161 0.2839781
## 195   195  9.806780 0.1743419 7.360756 0.3709727 0.05415419 0.2407021
## 196   196  9.866339 0.1656805 7.430827 0.5412073 0.05892743 0.4031245
## 197   197  9.773751 0.1812483 7.326601 0.4515810 0.03763095 0.2845253
## 198   198  9.706930 0.1920668 7.275930 0.3439761 0.02702817 0.1951985
## 199   199  9.852996 0.1678184 7.403670 0.3853046 0.04433907 0.3049050
## 200   200  9.773950 0.1812211 7.326940 0.4517410 0.03760941 0.2836997
## 201   201  9.762098 0.1827311 7.320500 0.3759972 0.03710584 0.2534567
## 202   202  9.833842 0.1703039 7.408169 0.2914936 0.03739392 0.2348183
## 203   203  9.706883 0.1920770 7.275996 0.3427294 0.02698645 0.1947452
## 204   204  9.706918 0.1920745 7.276343 0.3418284 0.02690730 0.1944711
## 205   205  9.766026 0.1817595 7.324016 0.3577664 0.04173812 0.2032299
## 206   206  9.707184 0.1920361 7.276479 0.3417104 0.02686366 0.1949266
## 207   207  9.840655 0.1696221 7.396774 0.3253917 0.03718039 0.2750289
## 208   208  9.786022 0.1788652 7.354301 0.3056369 0.03211498 0.2431751
## 209   209  9.765339 0.1818839 7.322790 0.3574270 0.04151411 0.2018226
## 210   210  9.774758 0.1807605 7.340982 0.3250467 0.04321890 0.2046218
## 211   211  9.707230 0.1920369 7.276482 0.3420154 0.02689284 0.1944229
## 212   212  9.759323 0.1839069 7.341299 0.4631202 0.04377468 0.3410788
## 213   213  9.707529 0.1919811 7.276811 0.3422276 0.02685810 0.1946583
## 214   214  9.706872 0.1920779 7.276312 0.3421563 0.02687518 0.1946258
## 215   215  9.707125 0.1920436 7.276537 0.3423709 0.02688269 0.1948166
## 216   216  9.707523 0.1919918 7.276899 0.3421291 0.02683683 0.1943649
## 217   217  9.777332 0.1807731 7.331015 0.4568853 0.03850126 0.2918640
## 218   218  9.775042 0.1807149 7.342036 0.3250848 0.04327753 0.2061948
## 219   219  9.707181 0.1920418 7.276845 0.3416553 0.02685859 0.1940363
## 220   220  9.707422 0.1920032 7.277065 0.3416047 0.02686831 0.1939818
## 221   221  9.707501 0.1919944 7.277178 0.3418774 0.02689742 0.1940936
## 222   222  9.707310 0.1920203 7.276914 0.3417660 0.02687880 0.1941040
## 223   223  9.755694 0.1833930 7.331231 0.3413574 0.03667873 0.2076602
## 224   224  9.774799 0.1807750 7.342142 0.3245774 0.04307101 0.2058221
## 225   225  9.820513 0.1728980 7.384374 0.3422645 0.05609527 0.2480655
## 226   226  9.707045 0.1920539 7.276607 0.3416431 0.02685931 0.1943162
## 227   227  9.769509 0.1814068 7.327492 0.3616857 0.04273247 0.2079439
## 228   228  9.706890 0.1920750 7.276668 0.3413004 0.02681223 0.1939648
## 229   229  9.706862 0.1920806 7.276604 0.3413612 0.02683954 0.1941413
## 230   230  9.706910 0.1920736 7.276613 0.3413077 0.02683709 0.1940985
## 231   231  9.780208 0.1804681 7.332348 0.4646151 0.03940813 0.2962378
## 232   232  9.844655 0.1688144 7.403243 0.3083319 0.04186990 0.2325107
## 233   233  9.706781 0.1920922 7.276498 0.3415837 0.02685977 0.1943116
## 234   234  9.813055 0.1746719 7.400711 0.4659393 0.05004793 0.3469729
## 235   235  9.706860 0.1920807 7.276577 0.3414986 0.02684323 0.1942913
## 236   236  9.899372 0.1611273 7.456086 0.4997876 0.06083318 0.3755565
## 237   237  9.957488 0.1501580 7.508795 0.2790800 0.05400183 0.2731042
## 238   238  9.882259 0.1640424 7.444013 0.5652339 0.06141656 0.4221842
## 239   239  9.888525 0.1620613 7.474164 0.4193487 0.04748758 0.3416164
## 240   240  9.706892 0.1920777 7.276628 0.3415893 0.02684758 0.1943579
##   nvmax
## 9     9

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

## (Intercept)          x4          x7          x9         x10         x16 
## 96.70112202 -0.01426636  3.24632946  0.95963994  0.38442187  0.28876623 
##         x17      stat98     stat110    sqrt.x18 
##  0.43713626  1.02673501 -0.96934667  7.48687040

Test

if (algo.stepwise.caret == TRUE){
  # Evaluate the caret-trained stepwise-selection model on the hold-out set.
  # Per the output below, test.model() prints a summary of the predicted
  # values and the test MSE; draw.limits = TRUE adds limit lines to the plots.
  test.model(model.stepwise, data.test,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             method = 'leapSeq',
             subopt = NULL,
             id = id,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   109.7   122.1   125.4   125.4   129.1   142.0 
## [1] "leapSeq  Test MSE: 93.4589126618511"

Stepwise Selection with CV (w/ filtered train)

Train

Test

LASSO (w/ full train)

Train

if(algo.LASSO == TRUE){
  # Fit a LASSO model (glmnet with alpha = 1) on the FULL training set and
  # choose the penalty parameter by cross-validation.
  #
  # glmnet needs matrix input. model.matrix() would also work here -- it
  # builds a design (model) matrix, e.g. expanding factors into dummy
  # variables (depending on the contrasts) and expanding interactions.
  x <- as.matrix(data.train[, feature.names])
  y <- data.train[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Candidate penalties spanning 10^10 down to 10^-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # alpha = 1 performs LASSO; cv.glmnet selects lambda by CV.
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  # Optimal penalty parameter. You can also make this call visually
  # from the CV plot above.
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

if(algo.LASSO == TRUE){
  # Evaluate the full-train LASSO fit on the hold-out set at the
  # CV-selected penalty (bestlambda, set in the training chunk above).
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Hold-out mean squared error.
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # Fix: the quantity above is the MSE (no square root taken), but the label
  # previously said "RMSE". Label it MSE, consistent with the other
  # "Test MSE" printouts in this report.
  print(paste0("LASSO Test MSE: ", testMSE_LASSO))

  # Predicted vs. observed on the test set.
  plot(ytest, lasso.pred)
}

LASSO (w/ filtered train)

Train

if(algo.LASSO == TRUE){
  # Fit a LASSO model (glmnet with alpha = 1) on the FILTERED training set
  # (data.train2) and choose the penalty parameter by cross-validation.
  #
  # glmnet needs matrix input. model.matrix() would also work here -- it
  # builds a design (model) matrix, e.g. expanding factors into dummy
  # variables (depending on the contrasts) and expanding interactions.
  x <- as.matrix(data.train2[, feature.names])
  y <- data.train2[, label.names]

  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Candidate penalties spanning 10^10 down to 10^-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # alpha = 1 performs LASSO; cv.glmnet selects lambda by CV.
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  # Optimal penalty parameter. You can also make this call visually
  # from the CV plot above.
  bestlambda <- cv.out$lambda.min

  print(coef(model.LASSO, s = bestlambda))
}

Test

if(algo.LASSO == TRUE){
  # Evaluate the filtered-train LASSO fit on the hold-out set at the
  # CV-selected penalty (bestlambda, set in the training chunk above).
  lasso.pred <- predict(model.LASSO, s = bestlambda, newx = xtest)

  # Hold-out mean squared error.
  testMSE_LASSO <- mean((ytest - lasso.pred)^2)
  # Fix: the quantity above is the MSE (no square root taken), but the label
  # previously said "RMSE". Label it MSE, consistent with the other
  # "Test MSE" printouts in this report.
  print(paste0("LASSO Test MSE: ", testMSE_LASSO))

  # Predicted vs. observed on the test set.
  plot(ytest, lasso.pred)
}

LASSO with CV (w/ full train)

Train

if (algo.LASSO.caret == TRUE){
  # Train a cross-validated LASSO model on the FULL training set via the
  # project wrapper train.caret.glmselect() (caret + glmnet backend).
  set.seed(1)
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "glmnet",
    subopt = "LASSO",
    feature.names = feature.names
  )
  # Keep only the fitted caret model for the test chunk below.
  model.LASSO.caret <- returned$model
}
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.206 on full training set
## glmnet 
## 
## 6002 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE      Rsquared   MAE     
##   0.01000000  9.683314  0.1947497  7.258914
##   0.01047616  9.682252  0.1948730  7.258121
##   0.01097499  9.681149  0.1950015  7.257291
##   0.01149757  9.680001  0.1951353  7.256432
##   0.01204504  9.678811  0.1952741  7.255538
##   0.01261857  9.677573  0.1954190  7.254600
##   0.01321941  9.676283  0.1955702  7.253624
##   0.01384886  9.674944  0.1957276  7.252616
##   0.01450829  9.673560  0.1958905  7.251587
##   0.01519911  9.672134  0.1960584  7.250540
##   0.01592283  9.670659  0.1962327  7.249462
##   0.01668101  9.669132  0.1964136  7.248342
##   0.01747528  9.667530  0.1966044  7.247163
##   0.01830738  9.665859  0.1968043  7.245919
##   0.01917910  9.664136  0.1970110  7.244639
##   0.02009233  9.662350  0.1972262  7.243311
##   0.02104904  9.660489  0.1974516  7.241930
##   0.02205131  9.658565  0.1976854  7.240499
##   0.02310130  9.656594  0.1979257  7.239038
##   0.02420128  9.654557  0.1981749  7.237552
##   0.02535364  9.652449  0.1984343  7.236054
##   0.02656088  9.650271  0.1987039  7.234512
##   0.02782559  9.647993  0.1989883  7.232919
##   0.02915053  9.645617  0.1992872  7.231258
##   0.03053856  9.643192  0.1995933  7.229531
##   0.03199267  9.640731  0.1999053  7.227750
##   0.03351603  9.638243  0.2002211  7.225966
##   0.03511192  9.635698  0.2005461  7.224152
##   0.03678380  9.633099  0.2008795  7.222309
##   0.03853529  9.630444  0.2012218  7.220415
##   0.04037017  9.627751  0.2015710  7.218525
##   0.04229243  9.625034  0.2019255  7.216609
##   0.04430621  9.622280  0.2022876  7.214684
##   0.04641589  9.619490  0.2026574  7.212712
##   0.04862602  9.616710  0.2030285  7.210699
##   0.05094138  9.613910  0.2034061  7.208763
##   0.05336699  9.611037  0.2037994  7.206782
##   0.05590810  9.608191  0.2041925  7.204807
##   0.05857021  9.605362  0.2045866  7.202842
##   0.06135907  9.602557  0.2049816  7.200901
##   0.06428073  9.599734  0.2053840  7.199000
##   0.06734151  9.596885  0.2057958  7.197076
##   0.07054802  9.594012  0.2062178  7.195183
##   0.07390722  9.591143  0.2066455  7.193257
##   0.07742637  9.588277  0.2070792  7.191382
##   0.08111308  9.585420  0.2075185  7.189601
##   0.08497534  9.582480  0.2079799  7.187666
##   0.08902151  9.579552  0.2084479  7.185716
##   0.09326033  9.576664  0.2089195  7.183743
##   0.09770100  9.573888  0.2093827  7.181969
##   0.10235310  9.571028  0.2098708  7.180108
##   0.10722672  9.568137  0.2103754  7.178367
##   0.11233240  9.565267  0.2108869  7.176642
##   0.11768120  9.562503  0.2113917  7.175070
##   0.12328467  9.559797  0.2118961  7.173592
##   0.12915497  9.557171  0.2123985  7.172267
##   0.13530478  9.554631  0.2128994  7.171060
##   0.14174742  9.552208  0.2133942  7.169982
##   0.14849683  9.549898  0.2138846  7.169145
##   0.15556761  9.547798  0.2143546  7.168613
##   0.16297508  9.545768  0.2148292  7.168227
##   0.17073526  9.544084  0.2152603  7.168232
##   0.17886495  9.542876  0.2156236  7.168706
##   0.18738174  9.542090  0.2159305  7.169562
##   0.19630407  9.541690  0.2161876  7.170711
##   0.20565123  9.541671  0.2163969  7.172152
##   0.21544347  9.542450  0.2164836  7.174521
##   0.22570197  9.543938  0.2164631  7.177692
##   0.23644894  9.546031  0.2163527  7.181313
##   0.24770764  9.548604  0.2161775  7.185329
##   0.25950242  9.551729  0.2159250  7.189818
##   0.27185882  9.555555  0.2155673  7.194719
##   0.28480359  9.559537  0.2152021  7.199686
##   0.29836472  9.563935  0.2147845  7.204897
##   0.31257158  9.568234  0.2144162  7.209862
##   0.32745492  9.572758  0.2140382  7.215102
##   0.34304693  9.577763  0.2136036  7.220779
##   0.35938137  9.583137  0.2131356  7.226737
##   0.37649358  9.588988  0.2126201  7.233139
##   0.39442061  9.595407  0.2120385  7.240133
##   0.41320124  9.602478  0.2113764  7.247595
##   0.43287613  9.610291  0.2106184  7.255678
##   0.45348785  9.618707  0.2097963  7.264254
##   0.47508102  9.627833  0.2088881  7.273393
##   0.49770236  9.637721  0.2078838  7.283147
##   0.52140083  9.648458  0.2067660  7.293731
##   0.54622772  9.659229  0.2057080  7.304629
##   0.57223677  9.670100  0.2046964  7.315799
##   0.59948425  9.680347  0.2039161  7.326409
##   0.62802914  9.690774  0.2031960  7.337287
##   0.65793322  9.701400  0.2025560  7.348618
##   0.68926121  9.712725  0.2018941  7.360563
##   0.72208090  9.724793  0.2012150  7.373089
##   0.75646333  9.737983  0.2004409  7.386501
##   0.79248290  9.752395  0.1995560  7.400899
##   0.83021757  9.768176  0.1985277  7.416493
##   0.86974900  9.785466  0.1973253  7.433198
##   0.91116276  9.804406  0.1959137  7.451203
##   0.95454846  9.825149  0.1942497  7.470689
##   1.00000000  9.847863  0.1922793  7.491584
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.2056512.

##    alpha    lambda
## 66     1 0.2056512
##     alpha     lambda     RMSE  Rsquared      MAE    RMSESD RsquaredSD
## 1       1 0.01000000 9.683314 0.1947497 7.258914 0.3428747 0.02697976
## 2       1 0.01047616 9.682252 0.1948730 7.258121 0.3429267 0.02698519
## 3       1 0.01097499 9.681149 0.1950015 7.257291 0.3429803 0.02699024
## 4       1 0.01149757 9.680001 0.1951353 7.256432 0.3430384 0.02699556
## 5       1 0.01204504 9.678811 0.1952741 7.255538 0.3431006 0.02700054
## 6       1 0.01261857 9.677573 0.1954190 7.254600 0.3431676 0.02700570
## 7       1 0.01321941 9.676283 0.1955702 7.253624 0.3432384 0.02701140
## 8       1 0.01384886 9.674944 0.1957276 7.252616 0.3433116 0.02701721
## 9       1 0.01450829 9.673560 0.1958905 7.251587 0.3433855 0.02702349
## 10      1 0.01519911 9.672134 0.1960584 7.250540 0.3434646 0.02703095
## 11      1 0.01592283 9.670659 0.1962327 7.249462 0.3435378 0.02703835
## 12      1 0.01668101 9.669132 0.1964136 7.248342 0.3436182 0.02704626
## 13      1 0.01747528 9.667530 0.1966044 7.247163 0.3436981 0.02705531
## 14      1 0.01830738 9.665859 0.1968043 7.245919 0.3437797 0.02706389
## 15      1 0.01917910 9.664136 0.1970110 7.244639 0.3438593 0.02707277
## 16      1 0.02009233 9.662350 0.1972262 7.243311 0.3439391 0.02708245
## 17      1 0.02104904 9.660489 0.1974516 7.241930 0.3440021 0.02709217
## 18      1 0.02205131 9.658565 0.1976854 7.240499 0.3440522 0.02710123
## 19      1 0.02310130 9.656594 0.1979257 7.239038 0.3440932 0.02710909
## 20      1 0.02420128 9.654557 0.1981749 7.237552 0.3441382 0.02711809
## 21      1 0.02535364 9.652449 0.1984343 7.236054 0.3441944 0.02712687
## 22      1 0.02656088 9.650271 0.1987039 7.234512 0.3442562 0.02713518
## 23      1 0.02782559 9.647993 0.1989883 7.232919 0.3443512 0.02714496
## 24      1 0.02915053 9.645617 0.1992872 7.231258 0.3444578 0.02714988
## 25      1 0.03053856 9.643192 0.1995933 7.229531 0.3445703 0.02715834
## 26      1 0.03199267 9.640731 0.1999053 7.227750 0.3446695 0.02717460
## 27      1 0.03351603 9.638243 0.2002211 7.225966 0.3447911 0.02718700
## 28      1 0.03511192 9.635698 0.2005461 7.224152 0.3448995 0.02720112
## 29      1 0.03678380 9.633099 0.2008795 7.222309 0.3449703 0.02721138
## 30      1 0.03853529 9.630444 0.2012218 7.220415 0.3450263 0.02721701
## 31      1 0.04037017 9.627751 0.2015710 7.218525 0.3450760 0.02722023
## 32      1 0.04229243 9.625034 0.2019255 7.216609 0.3451659 0.02722364
## 33      1 0.04430621 9.622280 0.2022876 7.214684 0.3452297 0.02722084
## 34      1 0.04641589 9.619490 0.2026574 7.212712 0.3452618 0.02721546
## 35      1 0.04862602 9.616710 0.2030285 7.210699 0.3453656 0.02721373
## 36      1 0.05094138 9.613910 0.2034061 7.208763 0.3455013 0.02721496
## 37      1 0.05336699 9.611037 0.2037994 7.206782 0.3457420 0.02723865
## 38      1 0.05590810 9.608191 0.2041925 7.204807 0.3460512 0.02727352
## 39      1 0.05857021 9.605362 0.2045866 7.202842 0.3462917 0.02730190
## 40      1 0.06135907 9.602557 0.2049816 7.200901 0.3465149 0.02733094
## 41      1 0.06428073 9.599734 0.2053840 7.199000 0.3467736 0.02735571
## 42      1 0.06734151 9.596885 0.2057958 7.197076 0.3470965 0.02739050
## 43      1 0.07054802 9.594012 0.2062178 7.195183 0.3474795 0.02742859
## 44      1 0.07390722 9.591143 0.2066455 7.193257 0.3479090 0.02747505
## 45      1 0.07742637 9.588277 0.2070792 7.191382 0.3483240 0.02752012
## 46      1 0.08111308 9.585420 0.2075185 7.189601 0.3487391 0.02756506
## 47      1 0.08497534 9.582480 0.2079799 7.187666 0.3490634 0.02761346
## 48      1 0.08902151 9.579552 0.2084479 7.185716 0.3493420 0.02766029
## 49      1 0.09326033 9.576664 0.2089195 7.183743 0.3495814 0.02772191
## 50      1 0.09770100 9.573888 0.2093827 7.181969 0.3497741 0.02778427
## 51      1 0.10235310 9.571028 0.2098708 7.180108 0.3499936 0.02785740
## 52      1 0.10722672 9.568137 0.2103754 7.178367 0.3502520 0.02792019
## 53      1 0.11233240 9.565267 0.2108869 7.176642 0.3502716 0.02795991
## 54      1 0.11768120 9.562503 0.2113917 7.175070 0.3501395 0.02798127
## 55      1 0.12328467 9.559797 0.2118961 7.173592 0.3498136 0.02794991
## 56      1 0.12915497 9.557171 0.2123985 7.172267 0.3495071 0.02790368
## 57      1 0.13530478 9.554631 0.2128994 7.171060 0.3491542 0.02784109
## 58      1 0.14174742 9.552208 0.2133942 7.169982 0.3487650 0.02773892
## 59      1 0.14849683 9.549898 0.2138846 7.169145 0.3484433 0.02763878
## 60      1 0.15556761 9.547798 0.2143546 7.168613 0.3481018 0.02753719
## 61      1 0.16297508 9.545768 0.2148292 7.168227 0.3477000 0.02747200
## 62      1 0.17073526 9.544084 0.2152603 7.168232 0.3471891 0.02741308
## 63      1 0.17886495 9.542876 0.2156236 7.168706 0.3464664 0.02732299
## 64      1 0.18738174 9.542090 0.2159305 7.169562 0.3456185 0.02724017
## 65      1 0.19630407 9.541690 0.2161876 7.170711 0.3448662 0.02718254
## 66      1 0.20565123 9.541671 0.2163969 7.172152 0.3441831 0.02713016
## 67      1 0.21544347 9.542450 0.2164836 7.174521 0.3435270 0.02706044
## 68      1 0.22570197 9.543938 0.2164631 7.177692 0.3429665 0.02699680
## 69      1 0.23644894 9.546031 0.2163527 7.181313 0.3426665 0.02691512
## 70      1 0.24770764 9.548604 0.2161775 7.185329 0.3425309 0.02684046
## 71      1 0.25950242 9.551729 0.2159250 7.189818 0.3424788 0.02680263
## 72      1 0.27185882 9.555555 0.2155673 7.194719 0.3424559 0.02680931
## 73      1 0.28480359 9.559537 0.2152021 7.199686 0.3422290 0.02679738
## 74      1 0.29836472 9.563935 0.2147845 7.204897 0.3418398 0.02677481
## 75      1 0.31257158 9.568234 0.2144162 7.209862 0.3416273 0.02680684
## 76      1 0.32745492 9.572758 0.2140382 7.215102 0.3413470 0.02682275
## 77      1 0.34304693 9.577763 0.2136036 7.220779 0.3410178 0.02682982
## 78      1 0.35938137 9.583137 0.2131356 7.226737 0.3406714 0.02681620
## 79      1 0.37649358 9.588988 0.2126201 7.233139 0.3402374 0.02679919
## 80      1 0.39442061 9.595407 0.2120385 7.240133 0.3397507 0.02678886
## 81      1 0.41320124 9.602478 0.2113764 7.247595 0.3392553 0.02678000
## 82      1 0.43287613 9.610291 0.2106184 7.255678 0.3387778 0.02678562
## 83      1 0.45348785 9.618707 0.2097963 7.264254 0.3382744 0.02679074
## 84      1 0.47508102 9.627833 0.2088881 7.273393 0.3377416 0.02679681
## 85      1 0.49770236 9.637721 0.2078838 7.283147 0.3371289 0.02679664
## 86      1 0.52140083 9.648458 0.2067660 7.293731 0.3364154 0.02678696
## 87      1 0.54622772 9.659229 0.2057080 7.304629 0.3357770 0.02677232
## 88      1 0.57223677 9.670100 0.2046964 7.315799 0.3349302 0.02661834
## 89      1 0.59948425 9.680347 0.2039161 7.326409 0.3345857 0.02662485
## 90      1 0.62802914 9.690774 0.2031960 7.337287 0.3340211 0.02657944
## 91      1 0.65793322 9.701400 0.2025560 7.348618 0.3334371 0.02667713
## 92      1 0.68926121 9.712725 0.2018941 7.360563 0.3324971 0.02673405
## 93      1 0.72208090 9.724793 0.2012150 7.373089 0.3317698 0.02686315
## 94      1 0.75646333 9.737983 0.2004409 7.386501 0.3313057 0.02704626
## 95      1 0.79248290 9.752395 0.1995560 7.400899 0.3308204 0.02725166
## 96      1 0.83021757 9.768176 0.1985277 7.416493 0.3303270 0.02748130
## 97      1 0.86974900 9.785466 0.1973253 7.433198 0.3298203 0.02773901
## 98      1 0.91116276 9.804406 0.1959137 7.451203 0.3293017 0.02802852
## 99      1 0.95454846 9.825149 0.1942497 7.470689 0.3287731 0.02835385
## 100     1 1.00000000 9.847863 0.1922793 7.491584 0.3282368 0.02871917
##         MAESD
## 1   0.1951676
## 2   0.1951837
## 3   0.1952010
## 4   0.1952238
## 5   0.1952524
## 6   0.1952842
## 7   0.1953180
## 8   0.1953605
## 9   0.1954075
## 10  0.1954552
## 11  0.1954845
## 12  0.1954986
## 13  0.1955090
## 14  0.1955179
## 15  0.1955392
## 16  0.1955751
## 17  0.1956029
## 18  0.1956323
## 19  0.1956673
## 20  0.1956956
## 21  0.1956733
## 22  0.1956454
## 23  0.1956382
## 24  0.1956443
## 25  0.1956761
## 26  0.1957274
## 27  0.1958033
## 28  0.1958443
## 29  0.1958324
## 30  0.1958208
## 31  0.1957562
## 32  0.1956984
## 33  0.1956896
## 34  0.1957049
## 35  0.1958062
## 36  0.1959310
## 37  0.1960475
## 38  0.1960717
## 39  0.1960146
## 40  0.1958644
## 41  0.1957417
## 42  0.1957212
## 43  0.1958509
## 44  0.1959891
## 45  0.1960083
## 46  0.1960946
## 47  0.1962168
## 48  0.1963759
## 49  0.1965028
## 50  0.1965934
## 51  0.1966812
## 52  0.1968328
## 53  0.1966829
## 54  0.1965463
## 55  0.1961633
## 56  0.1956906
## 57  0.1948314
## 58  0.1938750
## 59  0.1930786
## 60  0.1923940
## 61  0.1916089
## 62  0.1910437
## 63  0.1902613
## 64  0.1894573
## 65  0.1885676
## 66  0.1876295
## 67  0.1867392
## 68  0.1860179
## 69  0.1855288
## 70  0.1852457
## 71  0.1851124
## 72  0.1850899
## 73  0.1849618
## 74  0.1847224
## 75  0.1846318
## 76  0.1845475
## 77  0.1844374
## 78  0.1842587
## 79  0.1841053
## 80  0.1839147
## 81  0.1837462
## 82  0.1836985
## 83  0.1836315
## 84  0.1834965
## 85  0.1833816
## 86  0.1832064
## 87  0.1829106
## 88  0.1823745
## 89  0.1818319
## 90  0.1812989
## 91  0.1806567
## 92  0.1797544
## 93  0.1791612
## 94  0.1787535
## 95  0.1785088
## 96  0.1782305
## 97  0.1779180
## 98  0.1774256
## 99  0.1768560
## 100 0.1761699

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

if (algo.LASSO.caret == TRUE){
  # Evaluate the caret-trained LASSO model on the hold-out set. Per the
  # output below, test.model() prints a summary of the predicted values and
  # the test MSE; draw.limits = TRUE adds limit lines to the plots.
  test.model(model.LASSO.caret, data.test,
             formula = formula,
             feature.names = feature.names,
             label.names = label.names,
             method = 'glmnet',
             subopt = "LASSO",
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   111.2   122.3   125.4   125.4   128.7   139.3 
## [1] "glmnet LASSO Test MSE: 92.838456849401"

LASSO with CV (w/ filtered train)

Train

if (algo.LASSO.caret == TRUE){
  # Train a cross-validated LASSO model on the FILTERED training set
  # (data.train2) via the project wrapper train.caret.glmselect()
  # (caret + glmnet backend). Note this overwrites model.LASSO.caret
  # from the full-train chunk above.
  set.seed(1)
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "glmnet",
    subopt = "LASSO",
    feature.names = feature.names
  )
  model.LASSO.caret <- returned$model
}
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.102 on full training set
## glmnet 
## 
## 5714 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5142, 5142, 5143, 5143, 5143, 5143, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE      Rsquared   MAE     
##   0.01000000  7.464449  0.3034518  6.004609
##   0.01047616  7.463527  0.3035824  6.003898
##   0.01097499  7.462571  0.3037181  6.003163
##   0.01149757  7.461579  0.3038592  6.002399
##   0.01204504  7.460558  0.3040043  6.001614
##   0.01261857  7.459502  0.3041546  6.000800
##   0.01321941  7.458417  0.3043093  5.999965
##   0.01384886  7.457296  0.3044694  5.999116
##   0.01450829  7.456149  0.3046334  5.998246
##   0.01519911  7.454970  0.3048025  5.997346
##   0.01592283  7.453775  0.3049739  5.996445
##   0.01668101  7.452541  0.3051515  5.995511
##   0.01747528  7.451268  0.3053355  5.994569
##   0.01830738  7.449957  0.3055258  5.993594
##   0.01917910  7.448604  0.3057230  5.992612
##   0.02009233  7.447217  0.3059258  5.991626
##   0.02104904  7.445803  0.3061331  5.990635
##   0.02205131  7.444360  0.3063453  5.989632
##   0.02310130  7.442886  0.3065625  5.988628
##   0.02420128  7.441380  0.3067852  5.987606
##   0.02535364  7.439884  0.3070063  5.986590
##   0.02656088  7.438367  0.3072313  5.985547
##   0.02782559  7.436823  0.3074619  5.984460
##   0.02915053  7.435255  0.3076971  5.983371
##   0.03053856  7.433606  0.3079470  5.982248
##   0.03199267  7.431923  0.3082040  5.981112
##   0.03351603  7.430213  0.3084672  5.980009
##   0.03511192  7.428494  0.3087333  5.978877
##   0.03678380  7.426791  0.3089977  5.977699
##   0.03853529  7.425091  0.3092638  5.976526
##   0.04037017  7.423354  0.3095392  5.975334
##   0.04229243  7.421628  0.3098156  5.974183
##   0.04430621  7.419898  0.3100962  5.973065
##   0.04641589  7.418196  0.3103751  5.972014
##   0.04862602  7.416550  0.3106469  5.971039
##   0.05094138  7.414952  0.3109142  5.970117
##   0.05336699  7.413430  0.3111725  5.969324
##   0.05590810  7.411988  0.3114207  5.968709
##   0.05857021  7.410594  0.3116656  5.968206
##   0.06135907  7.409276  0.3119025  5.967867
##   0.06428073  7.407996  0.3121382  5.967566
##   0.06734151  7.406805  0.3123644  5.967352
##   0.07054802  7.405576  0.3126054  5.966991
##   0.07390722  7.404463  0.3128324  5.966643
##   0.07742637  7.403422  0.3130540  5.966333
##   0.08111308  7.402504  0.3132617  5.966059
##   0.08497534  7.401438  0.3135085  5.965552
##   0.08902151  7.400547  0.3137330  5.965180
##   0.09326033  7.399878  0.3139259  5.964900
##   0.09770100  7.399452  0.3140836  5.964749
##   0.10235310  7.399278  0.3142043  5.964759
##   0.10722672  7.399386  0.3142820  5.965066
##   0.11233240  7.399776  0.3143163  5.965509
##   0.11768120  7.400489  0.3143000  5.966081
##   0.12328467  7.401288  0.3142792  5.966719
##   0.12915497  7.402379  0.3142142  5.967652
##   0.13530478  7.403522  0.3141548  5.968801
##   0.14174742  7.404977  0.3140470  5.970301
##   0.14849683  7.406445  0.3139501  5.971794
##   0.15556761  7.408281  0.3137950  5.973572
##   0.16297508  7.410457  0.3135858  5.975574
##   0.17073526  7.413142  0.3132906  5.977999
##   0.17886495  7.416022  0.3129705  5.980744
##   0.18738174  7.419410  0.3125661  5.983762
##   0.19630407  7.423176  0.3121025  5.986993
##   0.20565123  7.427342  0.3115776  5.990519
##   0.21544347  7.431557  0.3110759  5.993916
##   0.22570197  7.436253  0.3104992  5.997585
##   0.23644894  7.441064  0.3099320  6.001454
##   0.24770764  7.446356  0.3092923  6.005923
##   0.25950242  7.451776  0.3086593  6.010998
##   0.27185882  7.457606  0.3079705  6.016452
##   0.28480359  7.463349  0.3073312  6.021865
##   0.29836472  7.469583  0.3066248  6.027712
##   0.31257158  7.476067  0.3059119  6.033847
##   0.32745492  7.483209  0.3051021  6.040534
##   0.34304693  7.491082  0.3041778  6.047657
##   0.35938137  7.499611  0.3031639  6.055306
##   0.37649358  7.508558  0.3021237  6.063291
##   0.39442061  7.518159  0.3009982  6.071765
##   0.41320124  7.527725  0.2999492  6.080219
##   0.43287613  7.538147  0.2987751  6.089350
##   0.45348785  7.549132  0.2975475  6.098878
##   0.47508102  7.561098  0.2961721  6.109131
##   0.49770236  7.574111  0.2946375  6.120190
##   0.52140083  7.588144  0.2929471  6.132113
##   0.54622772  7.602226  0.2913395  6.144068
##   0.57223677  7.617139  0.2896297  6.156764
##   0.59948425  7.632158  0.2880013  6.169387
##   0.62802914  7.648292  0.2862101  6.182957
##   0.65793322  7.664646  0.2844942  6.196976
##   0.68926121  7.682064  0.2826550  6.211769
##   0.72208090  7.698666  0.2812269  6.225881
##   0.75646333  7.716358  0.2797040  6.240754
##   0.79248290  7.734477  0.2782931  6.256334
##   0.83021757  7.754216  0.2766752  6.273329
##   0.86974900  7.775774  0.2747959  6.291540
##   0.91116276  7.799365  0.2725869  6.311228
##   0.95454846  7.825172  0.2699797  6.332677
##   1.00000000  7.853396  0.2668897  6.355823
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.1023531.

##    alpha    lambda
## 51     1 0.1023531
##     alpha     lambda     RMSE  Rsquared      MAE    RMSESD RsquaredSD
## 1       1 0.01000000 7.464449 0.3034518 6.004609 0.1823117 0.02938006
## 2       1 0.01047616 7.463527 0.3035824 6.003898 0.1823606 0.02940169
## 3       1 0.01097499 7.462571 0.3037181 6.003163 0.1824108 0.02942529
## 4       1 0.01149757 7.461579 0.3038592 6.002399 0.1824634 0.02945040
## 5       1 0.01204504 7.460558 0.3040043 6.001614 0.1825086 0.02947758
## 6       1 0.01261857 7.459502 0.3041546 6.000800 0.1825530 0.02950639
## 7       1 0.01321941 7.458417 0.3043093 5.999965 0.1825982 0.02953661
## 8       1 0.01384886 7.457296 0.3044694 5.999116 0.1826443 0.02956821
## 9       1 0.01450829 7.456149 0.3046334 5.998246 0.1826881 0.02960123
## 10      1 0.01519911 7.454970 0.3048025 5.997346 0.1827300 0.02963513
## 11      1 0.01592283 7.453775 0.3049739 5.996445 0.1827699 0.02966987
## 12      1 0.01668101 7.452541 0.3051515 5.995511 0.1828114 0.02970680
## 13      1 0.01747528 7.451268 0.3053355 5.994569 0.1828329 0.02974460
## 14      1 0.01830738 7.449957 0.3055258 5.993594 0.1828511 0.02978421
## 15      1 0.01917910 7.448604 0.3057230 5.992612 0.1828504 0.02982308
## 16      1 0.02009233 7.447217 0.3059258 5.991626 0.1828436 0.02986228
## 17      1 0.02104904 7.445803 0.3061331 5.990635 0.1828364 0.02990182
## 18      1 0.02205131 7.444360 0.3063453 5.989632 0.1828315 0.02994339
## 19      1 0.02310130 7.442886 0.3065625 5.988628 0.1828431 0.02998457
## 20      1 0.02420128 7.441380 0.3067852 5.987606 0.1828583 0.03002489
## 21      1 0.02535364 7.439884 0.3070063 5.986590 0.1828744 0.03006408
## 22      1 0.02656088 7.438367 0.3072313 5.985547 0.1828817 0.03010398
## 23      1 0.02782559 7.436823 0.3074619 5.984460 0.1828611 0.03014051
## 24      1 0.02915053 7.435255 0.3076971 5.983371 0.1828382 0.03017851
## 25      1 0.03053856 7.433606 0.3079470 5.982248 0.1828746 0.03021809
## 26      1 0.03199267 7.431923 0.3082040 5.981112 0.1829263 0.03025847
## 27      1 0.03351603 7.430213 0.3084672 5.980009 0.1830048 0.03030471
## 28      1 0.03511192 7.428494 0.3087333 5.978877 0.1830902 0.03035434
## 29      1 0.03678380 7.426791 0.3089977 5.977699 0.1831859 0.03040740
## 30      1 0.03853529 7.425091 0.3092638 5.976526 0.1832831 0.03046708
## 31      1 0.04037017 7.423354 0.3095392 5.975334 0.1833731 0.03053253
## 32      1 0.04229243 7.421628 0.3098156 5.974183 0.1834686 0.03060361
## 33      1 0.04430621 7.419898 0.3100962 5.973065 0.1835397 0.03067942
## 34      1 0.04641589 7.418196 0.3103751 5.972014 0.1836275 0.03076035
## 35      1 0.04862602 7.416550 0.3106469 5.971039 0.1836975 0.03084000
## 36      1 0.05094138 7.414952 0.3109142 5.970117 0.1837804 0.03092564
## 37      1 0.05336699 7.413430 0.3111725 5.969324 0.1838640 0.03101066
## 38      1 0.05590810 7.411988 0.3114207 5.968709 0.1839565 0.03109628
## 39      1 0.05857021 7.410594 0.3116656 5.968206 0.1841148 0.03117669
## 40      1 0.06135907 7.409276 0.3119025 5.967867 0.1843067 0.03125984
## 41      1 0.06428073 7.407996 0.3121382 5.967566 0.1844550 0.03134515
## 42      1 0.06734151 7.406805 0.3123644 5.967352 0.1846283 0.03143565
## 43      1 0.07054802 7.405576 0.3126054 5.966991 0.1849114 0.03152103
## 44      1 0.07390722 7.404463 0.3128324 5.966643 0.1852236 0.03160909
## 45      1 0.07742637 7.403422 0.3130540 5.966333 0.1857055 0.03172026
## 46      1 0.08111308 7.402504 0.3132617 5.966059 0.1862178 0.03183648
## 47      1 0.08497534 7.401438 0.3135085 5.965552 0.1866739 0.03195561
## 48      1 0.08902151 7.400547 0.3137330 5.965180 0.1871606 0.03206951
## 49      1 0.09326033 7.399878 0.3139259 5.964900 0.1876182 0.03217063
## 50      1 0.09770100 7.399452 0.3140836 5.964749 0.1881332 0.03226223
## 51      1 0.10235310 7.399278 0.3142043 5.964759 0.1885725 0.03231641
## 52      1 0.10722672 7.399386 0.3142820 5.965066 0.1890776 0.03236830
## 53      1 0.11233240 7.399776 0.3143163 5.965509 0.1897822 0.03243721
## 54      1 0.11768120 7.400489 0.3143000 5.966081 0.1905579 0.03251459
## 55      1 0.12328467 7.401288 0.3142792 5.966719 0.1914794 0.03261423
## 56      1 0.12915497 7.402379 0.3142142 5.967652 0.1924716 0.03271497
## 57      1 0.13530478 7.403522 0.3141548 5.968801 0.1931947 0.03278157
## 58      1 0.14174742 7.404977 0.3140470 5.970301 0.1939614 0.03285622
## 59      1 0.14849683 7.406445 0.3139501 5.971794 0.1948310 0.03297872
## 60      1 0.15556761 7.408281 0.3137950 5.973572 0.1957634 0.03309324
## 61      1 0.16297508 7.410457 0.3135858 5.975574 0.1966061 0.03319619
## 62      1 0.17073526 7.413142 0.3132906 5.977999 0.1974188 0.03328370
## 63      1 0.17886495 7.416022 0.3129705 5.980744 0.1979886 0.03330405
## 64      1 0.18738174 7.419410 0.3125661 5.983762 0.1985552 0.03331806
## 65      1 0.19630407 7.423176 0.3121025 5.986993 0.1989084 0.03328917
## 66      1 0.20565123 7.427342 0.3115776 5.990519 0.1993589 0.03328486
## 67      1 0.21544347 7.431557 0.3110759 5.993916 0.1998382 0.03333961
## 68      1 0.22570197 7.436253 0.3104992 5.997585 0.2004985 0.03340102
## 69      1 0.23644894 7.441064 0.3099320 6.001454 0.2014673 0.03345638
## 70      1 0.24770764 7.446356 0.3092923 6.005923 0.2024712 0.03348147
## 71      1 0.25950242 7.451776 0.3086593 6.010998 0.2032402 0.03343499
## 72      1 0.27185882 7.457606 0.3079705 6.016452 0.2039853 0.03337921
## 73      1 0.28480359 7.463349 0.3073312 6.021865 0.2038948 0.03321952
## 74      1 0.29836472 7.469583 0.3066248 6.027712 0.2038513 0.03304578
## 75      1 0.31257158 7.476067 0.3059119 6.033847 0.2040406 0.03290499
## 76      1 0.32745492 7.483209 0.3051021 6.040534 0.2042943 0.03274038
## 77      1 0.34304693 7.491082 0.3041778 6.047657 0.2046334 0.03256129
## 78      1 0.35938137 7.499611 0.3031639 6.055306 0.2050452 0.03239436
## 79      1 0.37649358 7.508558 0.3021237 6.063291 0.2057133 0.03232777
## 80      1 0.39442061 7.518159 0.3009982 6.071765 0.2064371 0.03227731
## 81      1 0.41320124 7.527725 0.2999492 6.080219 0.2068702 0.03225363
## 82      1 0.43287613 7.538147 0.2987751 6.089350 0.2073673 0.03223544
## 83      1 0.45348785 7.549132 0.2975475 6.098878 0.2081198 0.03225696
## 84      1 0.47508102 7.561098 0.2961721 6.109131 0.2089836 0.03226808
## 85      1 0.49770236 7.574111 0.2946375 6.120190 0.2100132 0.03228237
## 86      1 0.52140083 7.588144 0.2929471 6.132113 0.2110386 0.03234454
## 87      1 0.54622772 7.602226 0.2913395 6.144068 0.2116355 0.03238539
## 88      1 0.57223677 7.617139 0.2896297 6.156764 0.2122962 0.03240501
## 89      1 0.59948425 7.632158 0.2880013 6.169387 0.2126796 0.03233211
## 90      1 0.62802914 7.648292 0.2862101 6.182957 0.2130703 0.03223548
## 91      1 0.65793322 7.664646 0.2844942 6.196976 0.2132749 0.03215346
## 92      1 0.68926121 7.682064 0.2826550 6.211769 0.2134535 0.03207349
## 93      1 0.72208090 7.698666 0.2812269 6.225881 0.2133113 0.03206962
## 94      1 0.75646333 7.716358 0.2797040 6.240754 0.2133166 0.03203257
## 95      1 0.79248290 7.734477 0.2782931 6.256334 0.2134687 0.03206183
## 96      1 0.83021757 7.754216 0.2766752 6.273329 0.2136688 0.03206200
## 97      1 0.86974900 7.775774 0.2747959 6.291540 0.2139198 0.03206281
## 98      1 0.91116276 7.799365 0.2725869 6.311228 0.2141975 0.03205742
## 99      1 0.95454846 7.825172 0.2699797 6.332677 0.2145037 0.03204387
## 100     1 1.00000000 7.853396 0.2668897 6.355823 0.2148399 0.03201967
##         MAESD
## 1   0.1616226
## 2   0.1616612
## 3   0.1616981
## 4   0.1617330
## 5   0.1617508
## 6   0.1617626
## 7   0.1617733
## 8   0.1617788
## 9   0.1617818
## 10  0.1617772
## 11  0.1617691
## 12  0.1617621
## 13  0.1617442
## 14  0.1617225
## 15  0.1617016
## 16  0.1616866
## 17  0.1616977
## 18  0.1617166
## 19  0.1617523
## 20  0.1617864
## 21  0.1618035
## 22  0.1618120
## 23  0.1618090
## 24  0.1617952
## 25  0.1618520
## 26  0.1618652
## 27  0.1618657
## 28  0.1618888
## 29  0.1619270
## 30  0.1619444
## 31  0.1619948
## 32  0.1620474
## 33  0.1621043
## 34  0.1621959
## 35  0.1622381
## 36  0.1622381
## 37  0.1621995
## 38  0.1621305
## 39  0.1620621
## 40  0.1619294
## 41  0.1617460
## 42  0.1615430
## 43  0.1614585
## 44  0.1614503
## 45  0.1615903
## 46  0.1618176
## 47  0.1620445
## 48  0.1622738
## 49  0.1625039
## 50  0.1627787
## 51  0.1630008
## 52  0.1631881
## 53  0.1634693
## 54  0.1638669
## 55  0.1642908
## 56  0.1646584
## 57  0.1646686
## 58  0.1646377
## 59  0.1644920
## 60  0.1642434
## 61  0.1640613
## 62  0.1638621
## 63  0.1634971
## 64  0.1630899
## 65  0.1623659
## 66  0.1617647
## 67  0.1612401
## 68  0.1608198
## 69  0.1606990
## 70  0.1604436
## 71  0.1601182
## 72  0.1596238
## 73  0.1584147
## 74  0.1572837
## 75  0.1561295
## 76  0.1549931
## 77  0.1538531
## 78  0.1528145
## 79  0.1522349
## 80  0.1516859
## 81  0.1509999
## 82  0.1503580
## 83  0.1499753
## 84  0.1496719
## 85  0.1495059
## 86  0.1491742
## 87  0.1483958
## 88  0.1478541
## 89  0.1473155
## 90  0.1467687
## 91  0.1458654
## 92  0.1451282
## 93  0.1444076
## 94  0.1436026
## 95  0.1427448
## 96  0.1418765
## 97  0.1411443
## 98  0.1403082
## 99  0.1394692
## 100 0.1387076

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the caret-trained glmnet/LASSO model on the hold-out test set.
# test.model() (project helper) prints a summary of the predicted values and
# the test MSE, and draws prediction-limit plots when draw.limits = TRUE.
# algo.LASSO.caret is a logical params flag, so test it directly — comparing
# a logical to TRUE with `==` is redundant.
if (algo.LASSO.caret) {
  test.model(model.LASSO.caret, data.test,
             method = "glmnet", subopt = "LASSO",
             formula = formula,
             feature.names = feature.names, label.names = label.names,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   108.2   121.0   124.3   124.2   127.7   137.8 
## [1] "glmnet LASSO Test MSE: 94.1983462379039"

LARS with CV (w/ full train)

Train

# Train a Least Angle Regression (LARS) model via caret with cross-validation
# on the FULL training set. set.seed(1) makes the CV fold assignment
# reproducible. The fitted caret model is kept in model.LARS.caret.
if (algo.LARS.caret) {
  set.seed(1)
  # NOTE(review): subopt is passed as the *string* 'NULL' here, while the
  # matching test.model() calls pass the NULL object — confirm which form
  # train.caret.glmselect() actually expects.
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train,
                                    method = "lars",
                                    subopt = "NULL",
                                    feature.names = feature.names)
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.333 on full training set
## Least Angle Regression 
## 
## 6002 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE       Rsquared   MAE     
##   0.00000000  10.760090        NaN  8.200318
##   0.01010101  10.644307  0.1018765  8.116711
##   0.02020202  10.541440  0.1018765  8.043834
##   0.03030303  10.451868  0.1018765  7.979406
##   0.04040404  10.374603  0.1153701  7.923138
##   0.05050505  10.303565  0.1259936  7.870798
##   0.06060606  10.239769  0.1382632  7.821010
##   0.07070707  10.178252  0.1505818  7.771863
##   0.08080808  10.119445  0.1607048  7.724405
##   0.09090909  10.064007  0.1686065  7.679384
##   0.10101010  10.012462  0.1747299  7.636412
##   0.11111111   9.965469  0.1797679  7.595599
##   0.12121212   9.920291  0.1850407  7.556006
##   0.13131313   9.877367  0.1896411  7.518051
##   0.14141414   9.837288  0.1933784  7.481677
##   0.15151515   9.800091  0.1964029  7.446967
##   0.16161616   9.765809  0.1988408  7.414052
##   0.17171717   9.734471  0.2007969  7.382789
##   0.18181818   9.707031  0.2023281  7.354420
##   0.19191919   9.683714  0.2037882  7.329798
##   0.20202020   9.662740  0.2055124  7.308552
##   0.21212121   9.642596  0.2075154  7.288499
##   0.22222222   9.623261  0.2094977  7.269333
##   0.23232323   9.605266  0.2112882  7.251097
##   0.24242424   9.589823  0.2127114  7.234592
##   0.25252525   9.577284  0.2137853  7.220741
##   0.26262626   9.567319  0.2145960  7.209203
##   0.27272727   9.559074  0.2153219  7.199484
##   0.28282828   9.552480  0.2159356  7.191107
##   0.29292929   9.547636  0.2163381  7.184513
##   0.30303030   9.544099  0.2165834  7.178942
##   0.31313131   9.542030  0.2166326  7.174769
##   0.32323232   9.540843  0.2165788  7.171796
##   0.33333333   9.540558  0.2164054  7.169875
##   0.34343434   9.540949  0.2161423  7.168824
##   0.35353535   9.541685  0.2158501  7.168143
##   0.36363636   9.542771  0.2155205  7.167839
##   0.37373737   9.544135  0.2151582  7.167769
##   0.38383838   9.545849  0.2147481  7.167892
##   0.39393939   9.547581  0.2143498  7.168248
##   0.40404040   9.549239  0.2139802  7.168613
##   0.41414141   9.551035  0.2136003  7.169307
##   0.42424242   9.552932  0.2132138  7.170100
##   0.43434343   9.554817  0.2128378  7.170987
##   0.44444444   9.556632  0.2124806  7.171901
##   0.45454545   9.558571  0.2121098  7.172922
##   0.46464646   9.560609  0.2117300  7.173981
##   0.47474747   9.562570  0.2113693  7.175026
##   0.48484848   9.564582  0.2110048  7.176110
##   0.49494949   9.566544  0.2106523  7.177276
##   0.50505051   9.568559  0.2102970  7.178474
##   0.51515152   9.570615  0.2099403  7.179766
##   0.52525253   9.572593  0.2096007  7.181126
##   0.53535354   9.574487  0.2092799  7.182359
##   0.54545455   9.576411  0.2089592  7.183627
##   0.55555556   9.578445  0.2086246  7.185047
##   0.56565657   9.580555  0.2082827  7.186450
##   0.57575758   9.582658  0.2079475  7.187848
##   0.58585859   9.584794  0.2076118  7.189209
##   0.59595960   9.586900  0.2072845  7.190513
##   0.60606061   9.588995  0.2069631  7.191850
##   0.61616162   9.591098  0.2066453  7.193318
##   0.62626263   9.593307  0.2063147  7.194813
##   0.63636364   9.595596  0.2059760  7.196310
##   0.64646465   9.597862  0.2056447  7.197741
##   0.65656566   9.600103  0.2053224  7.199216
##   0.66666667   9.602319  0.2050078  7.200715
##   0.67676768   9.604642  0.2046808  7.202315
##   0.68686869   9.607088  0.2043382  7.203996
##   0.69696970   9.609600  0.2039894  7.205732
##   0.70707071   9.612191  0.2036323  7.207517
##   0.71717172   9.614767  0.2032820  7.209286
##   0.72727273   9.617403  0.2029275  7.211150
##   0.73737374   9.620087  0.2025700  7.213064
##   0.74747475   9.622847  0.2022054  7.214997
##   0.75757576   9.625664  0.2018366  7.216956
##   0.76767677   9.628562  0.2014593  7.219024
##   0.77777778   9.631507  0.2010795  7.221127
##   0.78787879   9.634541  0.2006898  7.223299
##   0.79797980   9.637578  0.2003024  7.225438
##   0.80808081   9.640655  0.1999123  7.227661
##   0.81818182   9.643788  0.1995173  7.229918
##   0.82828283   9.647044  0.1991070  7.232232
##   0.83838384   9.650297  0.1987014  7.234497
##   0.84848485   9.653542  0.1983013  7.236787
##   0.85858586   9.656778  0.1979059  7.239121
##   0.86868687   9.660096  0.1975018  7.241596
##   0.87878788   9.663445  0.1970971  7.244080
##   0.88888889   9.666838  0.1966901  7.246618
##   0.89898990   9.670262  0.1962825  7.249156
##   0.90909091   9.673715  0.1958745  7.251690
##   0.91919192   9.677250  0.1954585  7.254341
##   0.92929293   9.680834  0.1950394  7.257034
##   0.93939394   9.684472  0.1946168  7.259750
##   0.94949495   9.688139  0.1941937  7.262483
##   0.95959596   9.691820  0.1937723  7.265207
##   0.96969697   9.695535  0.1933504  7.267965
##   0.97979798   9.699277  0.1929287  7.270793
##   0.98989899   9.703077  0.1925023  7.273705
##   1.00000000   9.706892  0.1920777  7.276628
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.3333333.

##     fraction
## 34 0.3333333
## Warning: Removed 1 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the caret-trained LARS model on the hold-out test set.
# test.model() (project helper) prints a prediction summary and the test MSE.
# Test the logical flag directly instead of the redundant `== TRUE`.
if (algo.LARS.caret) {
  test.model(model.LARS.caret, data.test,
             method = "lars", subopt = NULL,
             formula = formula,
             feature.names = feature.names, label.names = label.names,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   111.2   122.3   125.4   125.4   128.7   139.3 
## [1] "lars  Test MSE: 92.8032533211613"

LARS with CV (w/ filtered train)

Train

# Re-train the LARS model via caret with cross-validation on the FILTERED
# training set (data.train2). set.seed(1) keeps the CV folds reproducible.
# NOTE(review): this overwrites model.LARS.caret from the full-train fit;
# the subsequent Test chunk therefore evaluates the filtered-train model.
if (algo.LARS.caret) {
  set.seed(1)
  # NOTE(review): subopt is passed as the *string* 'NULL' here, while the
  # matching test.model() calls pass the NULL object — confirm which form
  # train.caret.glmselect() actually expects.
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train2,
                                    method = "lars",
                                    subopt = "NULL",
                                    feature.names = feature.names)
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.505 on full training set
## Least Angle Regression 
## 
## 5714 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5142, 5142, 5143, 5143, 5143, 5143, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE      Rsquared   MAE     
##   0.00000000  8.923500        NaN  7.126370
##   0.01010101  8.791002  0.1410643  7.033091
##   0.02020202  8.672467  0.1410643  6.951806
##   0.03030303  8.570111  0.1450071  6.881497
##   0.04040404  8.477044  0.1680887  6.817882
##   0.05050505  8.390403  0.1825041  6.759232
##   0.06060606  8.311800  0.1915863  6.706277
##   0.07070707  8.240655  0.2058563  6.654840
##   0.08080808  8.171385  0.2206433  6.603761
##   0.09090909  8.105831  0.2323555  6.554359
##   0.10101010  8.044086  0.2415519  6.506722
##   0.11111111  7.987294  0.2492004  6.462118
##   0.12121212  7.934127  0.2566473  6.420588
##   0.13131313  7.882965  0.2634141  6.380017
##   0.14141414  7.834983  0.2689590  6.341073
##   0.15151515  7.790289  0.2734659  6.303893
##   0.16161616  7.748940  0.2771131  6.269106
##   0.17171717  7.711362  0.2800790  6.236754
##   0.18181818  7.678372  0.2830599  6.208614
##   0.19191919  7.647721  0.2863303  6.182525
##   0.20202020  7.619882  0.2894044  6.159149
##   0.21212121  7.593955  0.2922700  6.137084
##   0.22222222  7.568885  0.2952971  6.115827
##   0.23232323  7.546341  0.2978523  6.096510
##   0.24242424  7.526616  0.3000281  6.079271
##   0.25252525  7.508700  0.3020557  6.063574
##   0.26262626  7.492110  0.3040261  6.048681
##   0.27272727  7.478099  0.3056675  6.035932
##   0.28282828  7.466784  0.3069120  6.025210
##   0.29292929  7.457072  0.3080193  6.016047
##   0.30303030  7.448614  0.3090141  6.008069
##   0.31313131  7.441221  0.3099070  6.001365
##   0.32323232  7.434996  0.3106478  5.996494
##   0.33333333  7.429508  0.3113192  5.992302
##   0.34343434  7.424654  0.3119176  5.988401
##   0.35353535  7.420160  0.3124862  5.984594
##   0.36363636  7.416303  0.3129600  5.981081
##   0.37373737  7.412987  0.3133554  5.978048
##   0.38383838  7.410422  0.3136275  5.975765
##   0.39393939  7.408246  0.3138454  5.973816
##   0.40404040  7.406441  0.3140124  5.972127
##   0.41414141  7.405092  0.3141038  5.970753
##   0.42424242  7.404010  0.3141616  5.969635
##   0.43434343  7.402923  0.3142349  5.968502
##   0.44444444  7.401939  0.3142995  5.967502
##   0.45454545  7.401197  0.3143305  5.966716
##   0.46464646  7.400519  0.3143615  5.966079
##   0.47474747  7.399962  0.3143748  5.965699
##   0.48484848  7.399542  0.3143687  5.965358
##   0.49494949  7.399159  0.3143628  5.964959
##   0.50505051  7.398980  0.3143226  5.964675
##   0.51515152  7.399022  0.3142468  5.964540
##   0.52525253  7.399173  0.3141554  5.964563
##   0.53535354  7.399414  0.3140526  5.964674
##   0.54545455  7.399790  0.3139295  5.964855
##   0.55555556  7.400289  0.3137898  5.965050
##   0.56565657  7.400939  0.3136262  5.965315
##   0.57575758  7.401676  0.3134521  5.965640
##   0.58585859  7.402446  0.3132768  5.965952
##   0.59595960  7.403233  0.3131032  5.966264
##   0.60606061  7.403964  0.3129460  5.966426
##   0.61616162  7.404814  0.3127716  5.966711
##   0.62626263  7.405748  0.3125840  5.967004
##   0.63636364  7.406689  0.3124000  5.967234
##   0.64646465  7.407706  0.3122073  5.967487
##   0.65656566  7.408701  0.3120229  5.967663
##   0.66666667  7.409738  0.3118342  5.967859
##   0.67676768  7.410890  0.3116303  5.968204
##   0.68686869  7.412167  0.3114072  5.968702
##   0.69696970  7.413501  0.3111782  5.969318
##   0.70707071  7.414893  0.3109422  5.970052
##   0.71717172  7.416417  0.3106854  5.970927
##   0.72727273  7.417995  0.3104230  5.971867
##   0.73737374  7.419675  0.3101469  5.972883
##   0.74747475  7.421416  0.3098629  5.973986
##   0.75757576  7.423226  0.3095712  5.975202
##   0.76767677  7.425058  0.3092808  5.976469
##   0.77777778  7.426965  0.3089810  5.977812
##   0.78787879  7.428943  0.3086735  5.979155
##   0.79797980  7.431021  0.3083517  5.980512
##   0.80808081  7.433169  0.3080218  5.981940
##   0.81818182  7.435319  0.3076948  5.983413
##   0.82828283  7.437487  0.3073681  5.984931
##   0.83838384  7.439673  0.3070426  5.986451
##   0.84848485  7.441973  0.3067014  5.988008
##   0.85858586  7.444367  0.3063476  5.989651
##   0.86868687  7.446835  0.3059851  5.991359
##   0.87878788  7.449409  0.3056080  5.993203
##   0.88888889  7.452016  0.3052302  5.995139
##   0.89898990  7.454678  0.3048475  5.997139
##   0.90909091  7.457467  0.3044475  5.999248
##   0.91919192  7.460365  0.3040335  6.001467
##   0.92929293  7.463353  0.3036083  6.003766
##   0.93939394  7.466395  0.3031779  6.006162
##   0.94949495  7.469490  0.3027423  6.008644
##   0.95959596  7.472644  0.3023009  6.011197
##   0.96969697  7.475912  0.3018441  6.013844
##   0.97979798  7.479256  0.3013781  6.016565
##   0.98989899  7.482647  0.3009085  6.019337
##   1.00000000  7.486026  0.3004453  6.022099
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.5050505.

##     fraction
## 51 0.5050505
## Warning: Removed 1 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the LARS model (most recently re-trained on the filtered training
# set) on the hold-out test set; prints a prediction summary and the test MSE.
# Test the logical flag directly instead of the redundant `== TRUE`.
if (algo.LARS.caret) {
  test.model(model.LARS.caret, data.test,
             method = "lars", subopt = NULL,
             formula = formula,
             feature.names = feature.names, label.names = label.names,
             draw.limits = TRUE)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   108.2   121.0   124.3   124.2   127.7   137.8 
## [1] "lars  Test MSE: 94.1798045136605"